Diffstat (limited to 'compiler/optimizing/nodes.h')
-rw-r--r--  compiler/optimizing/nodes.h  266
1 file changed, 190 insertions, 76 deletions
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 0f2c1cffee..19614f11c6 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -35,6 +35,7 @@
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
+#include "utils/array_ref.h"
namespace art {
@@ -240,8 +241,9 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
// put deoptimization instructions, etc.
void TransformLoopHeaderForBCE(HBasicBlock* header);
- // Removes `block` from the graph.
- void DeleteDeadBlock(HBasicBlock* block);
+ // Removes `block` from the graph. Assumes `block` has been disconnected from
+ // other blocks and has no instructions or phis.
+ void DeleteDeadEmptyBlock(HBasicBlock* block);
// Splits the edge between `block` and `successor` while preserving the
// indices in the predecessor/successor lists. If there are multiple edges
@@ -350,8 +352,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
HCurrentMethod* GetCurrentMethod();
- HBasicBlock* FindCommonDominator(HBasicBlock* first, HBasicBlock* second) const;
-
const DexFile& GetDexFile() const {
return dex_file_;
}
@@ -371,6 +371,11 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
bool HasTryCatch() const { return has_try_catch_; }
void SetHasTryCatch(bool value) { has_try_catch_ = value; }
+ // Returns an instruction with the opposite boolean value from `cond`.
+ // The returned instruction is inserted into the graph, either as a
+ // constant, or before `cursor`.
+ HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
+
private:
void FindBackEdges(ArenaBitVector* visited);
void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
@@ -661,6 +666,9 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
return successors_;
}
+ ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
+ ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
+
bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
return ContainsElement(successors_, block, start_from);
}
@@ -811,12 +819,6 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
return GetPredecessorIndexOf(predecessor) == idx;
}
- // Returns the number of non-exceptional successors. SsaChecker ensures that
- // these are stored at the beginning of the successor list.
- size_t NumberOfNormalSuccessors() const {
- return EndsWithTryBoundary() ? 1 : GetSuccessors().size();
- }
-
// Create a new block between this block and its predecessors. The new block
// is added to the graph, all predecessor edges are relinked to it and an edge
// is created to `this`. Returns the new empty block. Reverse post order or
@@ -837,6 +839,15 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
// blocks are consistent (for example ending with a control flow instruction).
HBasicBlock* SplitAfter(HInstruction* cursor);
+ // Splits the catch block into two blocks after the original move-exception bytecode
+ // instruction, or at the beginning if not present. Returns the newly created,
+ // latter block, or nullptr if such a block could not be created (it must be dead
+ // in that case). Note that this method just updates raw block information,
+ // like predecessors, successors, dominators, and instruction list. It does not
+ // update the graph, reverse post order, loop information, nor make sure the
+ // blocks are consistent (for example ending with a control flow instruction).
+ HBasicBlock* SplitCatchBlockAfterMoveException();
+
// Merge `other` at the end of `this`. Successors and dominated blocks of
// `other` are changed to be successors and dominated blocks of `this`. Note
// that this method does not update the graph, reverse post order, loop
@@ -1084,13 +1095,20 @@ class HLoopInformationOutwardIterator : public ValueObject {
M(UShr, BinaryOperation) \
M(Xor, BinaryOperation) \
+#ifndef ART_ENABLE_CODEGEN_arm
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
+#else
+#define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
+ M(ArmDexCacheArraysBase, Instruction)
+#endif
#ifndef ART_ENABLE_CODEGEN_arm64
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
#else
#define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
- M(Arm64IntermediateAddress, Instruction)
+ M(Arm64DataProcWithShifterOp, Instruction) \
+ M(Arm64IntermediateAddress, Instruction) \
+ M(Arm64MultiplyAccumulate, Instruction)
#endif
#define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
@@ -1430,7 +1448,7 @@ class SideEffects : public ValueObject {
return flags_ == (kAllChangeBits | kAllDependOnBits);
}
- // Returns true if this may read something written by other.
+ // Returns true if `this` may read something written by `other`.
bool MayDependOn(SideEffects other) const {
const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
return (other.flags_ & depends_on_flags);
@@ -1620,6 +1638,11 @@ class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
return holder_;
}
+
+ bool IsFromInlinedInvoke() const {
+ return GetParent() != nullptr;
+ }
+
private:
// Record instructions' use entries of this environment for constant-time removal.
// It should only be called by HInstruction when a new environment use is added.
@@ -1725,6 +1748,13 @@ class ReferenceTypeInfo : ValueObject {
return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
}
+ bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+ DCHECK(IsValid());
+ DCHECK(rti.IsValid());
+ return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
+ GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
+ }
+
// Returns true if the type information provide the same amount of details.
// Note that it does not mean that the instructions have the same actual type
// (because the type can be the result of a merge).
@@ -1927,6 +1957,14 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
// Move `this` instruction before `cursor`.
void MoveBefore(HInstruction* cursor);
+ // Move `this` before its first user and out of any loops. If there is no
+ // out-of-loop user that dominates all other users, move the instruction
+ // to the end of the out-of-loop common dominator of the users' blocks.
+ //
+ // This can be used only on non-throwing instructions with no side effects that
+ // have at least one use but no environment uses.
+ void MoveBeforeFirstUserAndOutOfLoops();
+
#define INSTRUCTION_TYPE_CHECK(type, super) \
bool Is##type() const { return (As##type() != nullptr); } \
virtual const H##type* As##type() const { return nullptr; } \
@@ -2390,6 +2428,10 @@ class HTryBoundary : public HTemplateInstruction<0> {
// Returns the block's non-exceptional successor (index zero).
HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
+ ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
+ return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
+ }
+
// Returns whether `handler` is among its exception handlers (non-zero index
// successors).
bool HasExceptionHandler(const HBasicBlock& handler) const {
@@ -2417,25 +2459,6 @@ class HTryBoundary : public HTemplateInstruction<0> {
DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
-// Iterator over exception handlers of a given HTryBoundary, i.e. over
-// exceptional successors of its basic block.
-class HExceptionHandlerIterator : public ValueObject {
- public:
- explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary)
- : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {}
-
- bool Done() const { return index_ == block_.GetSuccessors().size(); }
- HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; }
- size_t CurrentSuccessorIndex() const { return index_; }
- void Advance() { ++index_; }
-
- private:
- const HBasicBlock& block_;
- size_t index_;
-
- DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator);
-};
-
// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
public:
@@ -2604,6 +2627,11 @@ class HBinaryOperation : public HExpression<2> {
VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
return nullptr;
}
+ virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
+ HNullConstant* y ATTRIBUTE_UNUSED) const {
+ VLOG(compiler) << DebugName() << " is not defined for the (null, null) case.";
+ return nullptr;
+ }
// Returns an input that can legally be used as the right input and is
// constant, or null.
@@ -2694,6 +2722,10 @@ class HEqual : public HCondition {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
+ HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
+ HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(1);
+ }
DECLARE_INSTRUCTION(Equal);
@@ -2726,6 +2758,10 @@ class HNotEqual : public HCondition {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
+ HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
+ HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(0);
+ }
DECLARE_INSTRUCTION(NotEqual);
@@ -3227,7 +3263,7 @@ class HInvoke : public HInstruction {
void SetIntrinsic(Intrinsics intrinsic, IntrinsicNeedsEnvironmentOrCache needs_env_or_cache);
bool IsFromInlinedInvoke() const {
- return GetEnvironment()->GetParent() != nullptr;
+ return GetEnvironment()->IsFromInlinedInvoke();
}
bool CanThrow() const OVERRIDE { return true; }
@@ -3395,28 +3431,48 @@ class HInvokeStaticOrDirect : public HInvoke {
MethodReference target_method,
DispatchInfo dispatch_info,
InvokeType original_invoke_type,
- InvokeType invoke_type,
+ InvokeType optimized_invoke_type,
ClinitCheckRequirement clinit_check_requirement)
: HInvoke(arena,
number_of_arguments,
- // There is one extra argument for the HCurrentMethod node, and
- // potentially one other if the clinit check is explicit, and one other
- // if the method is a string factory.
- 1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u)
- + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
+ // There is potentially one extra argument for the HCurrentMethod node, and
+ // potentially one other if the clinit check is explicit, and potentially
+ // one other if the method is a string factory.
+ (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
+ (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) +
+ (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
return_type,
dex_pc,
method_index,
original_invoke_type),
- invoke_type_(invoke_type),
+ optimized_invoke_type_(optimized_invoke_type),
clinit_check_requirement_(clinit_check_requirement),
target_method_(target_method),
- dispatch_info_(dispatch_info) {}
+ dispatch_info_(dispatch_info) { }
void SetDispatchInfo(const DispatchInfo& dispatch_info) {
+ bool had_current_method_input = HasCurrentMethodInput();
+ bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
+
+ // Using the current method is the default and once we find a better
+ // method load kind, we should not go back to using the current method.
+ DCHECK(had_current_method_input || !needs_current_method_input);
+
+ if (had_current_method_input && !needs_current_method_input) {
+ DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
+ RemoveInputAt(GetSpecialInputIndex());
+ }
dispatch_info_ = dispatch_info;
}
+ void AddSpecialInput(HInstruction* input) {
+ // We allow only one special input.
+ DCHECK(!IsStringInit() && !HasCurrentMethodInput());
+ DCHECK(InputCount() == GetSpecialInputIndex() ||
+ (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
+ InsertInputAt(GetSpecialInputIndex(), input);
+ }
+
bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// We access the method via the dex cache so we can't do an implicit null check.
// TODO: for intrinsics we can generate implicit null checks.
@@ -3427,19 +3483,42 @@ class HInvokeStaticOrDirect : public HInvoke {
return return_type_ == Primitive::kPrimNot && !IsStringInit();
}
- InvokeType GetInvokeType() const { return invoke_type_; }
+ // Get the index of the special input, if any.
+ //
+ // If the invoke IsStringInit(), it initially has a HFakeString special argument
+ // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(),
+ // the "special input" is the current method pointer; otherwise there may be one
+ // platform-specific special input, such as PC-relative addressing base.
+ uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
+
+ InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; }
+ void SetOptimizedInvokeType(InvokeType invoke_type) {
+ optimized_invoke_type_ = invoke_type;
+ }
+
MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
- uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); }
bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
- bool HasPcRelDexCache() const {
+ bool HasPcRelativeDexCache() const {
return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
}
+ bool HasCurrentMethodInput() const {
+ // This function can be called only after the invoke has been fully initialized by the builder.
+ if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
+ DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
+ return true;
+ } else {
+ DCHECK(InputCount() == GetSpecialInputIndex() ||
+ !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
+ return false;
+ }
+ }
bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
MethodReference GetTargetMethod() const { return target_method_; }
+ void SetTargetMethod(MethodReference method) { target_method_ = method; }
int32_t GetStringInitOffset() const {
DCHECK(IsStringInit());
@@ -3452,7 +3531,7 @@ class HInvokeStaticOrDirect : public HInvoke {
}
uint32_t GetDexCacheArrayOffset() const {
- DCHECK(HasPcRelDexCache());
+ DCHECK(HasPcRelativeDexCache());
return dispatch_info_.method_load_data;
}
@@ -3465,29 +3544,28 @@ class HInvokeStaticOrDirect : public HInvoke {
// Is this instruction a call to a static method?
bool IsStatic() const {
- return GetInvokeType() == kStatic;
+ return GetOriginalInvokeType() == kStatic;
}
- // Remove the art::HLoadClass instruction set as last input by
- // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of
- // the initial art::HClinitCheck instruction (only relevant for
- // static calls with explicit clinit check).
- void RemoveLoadClassAsLastInput() {
+ // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
+ // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
+ // instruction; only relevant for static calls with explicit clinit check.
+ void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
DCHECK(IsStaticWithExplicitClinitCheck());
size_t last_input_index = InputCount() - 1;
HInstruction* last_input = InputAt(last_input_index);
DCHECK(last_input != nullptr);
- DCHECK(last_input->IsLoadClass()) << last_input->DebugName();
+ DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
RemoveAsUserOfInput(last_input_index);
inputs_.pop_back();
- clinit_check_requirement_ = ClinitCheckRequirement::kImplicit;
- DCHECK(IsStaticWithImplicitClinitCheck());
+ clinit_check_requirement_ = new_requirement;
+ DCHECK(!IsStaticWithExplicitClinitCheck());
}
bool IsStringFactoryFor(HFakeString* str) const {
if (!IsStringInit()) return false;
- // +1 for the current method.
- if (InputCount() == (number_of_arguments_ + 1)) return false;
+ DCHECK(!HasCurrentMethodInput());
+ if (InputCount() == (number_of_arguments_)) return false;
return InputAt(InputCount() - 1)->AsFakeString() == str;
}
@@ -3502,7 +3580,7 @@ class HInvokeStaticOrDirect : public HInvoke {
}
// Is this a call to a static method whose declaring class has an
- // explicit intialization check in the graph?
+ // explicit initialization check in the graph?
bool IsStaticWithExplicitClinitCheck() const {
return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
}
@@ -3513,6 +3591,11 @@ class HInvokeStaticOrDirect : public HInvoke {
return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
}
+ // Does this method load kind need the current method as an input?
+ static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
+ return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
+ }
+
DECLARE_INSTRUCTION(InvokeStaticOrDirect);
protected:
@@ -3530,8 +3613,11 @@ class HInvokeStaticOrDirect : public HInvoke {
return input_record;
}
+ void InsertInputAt(size_t index, HInstruction* input);
+ void RemoveInputAt(size_t index);
+
private:
- const InvokeType invoke_type_;
+ InvokeType optimized_invoke_type_;
ClinitCheckRequirement clinit_check_requirement_;
// The target method may refer to different dex file or method index than the original
// invoke. This happens for sharpened calls and for calls where a method was redeclared
@@ -3541,6 +3627,8 @@ class HInvokeStaticOrDirect : public HInvoke {
DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
+std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
+std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
class HInvokeVirtual : public HInvoke {
public:
@@ -3595,18 +3683,24 @@ class HInvokeInterface : public HInvoke {
DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
};
-class HNewInstance : public HExpression<1> {
+class HNewInstance : public HExpression<2> {
public:
- HNewInstance(HCurrentMethod* current_method,
+ HNewInstance(HInstruction* cls,
+ HCurrentMethod* current_method,
uint32_t dex_pc,
uint16_t type_index,
const DexFile& dex_file,
+ bool can_throw,
+ bool finalizable,
QuickEntrypointEnum entrypoint)
: HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
type_index_(type_index),
dex_file_(dex_file),
+ can_throw_(can_throw),
+ finalizable_(finalizable),
entrypoint_(entrypoint) {
- SetRawInputAt(0, current_method);
+ SetRawInputAt(0, cls);
+ SetRawInputAt(1, current_method);
}
uint16_t GetTypeIndex() const { return type_index_; }
@@ -3614,22 +3708,30 @@ class HNewInstance : public HExpression<1> {
// Calls runtime so needs an environment.
bool NeedsEnvironment() const OVERRIDE { return true; }
- // It may throw when called on:
- // - interfaces
- // - abstract/innaccessible/unknown classes
- // TODO: optimize when possible.
- bool CanThrow() const OVERRIDE { return true; }
+
+ // It may throw when called on a type that's not instantiable/accessible.
+ // It can throw OOME.
+ // TODO: distinguish between the two cases so we can for example allow allocation elimination.
+ bool CanThrow() const OVERRIDE { return can_throw_ || true; }
+
+ bool IsFinalizable() const { return finalizable_; }
bool CanBeNull() const OVERRIDE { return false; }
QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
+ void SetEntrypoint(QuickEntrypointEnum entrypoint) {
+ entrypoint_ = entrypoint;
+ }
+
DECLARE_INSTRUCTION(NewInstance);
private:
const uint16_t type_index_;
const DexFile& dex_file_;
- const QuickEntrypointEnum entrypoint_;
+ const bool can_throw_;
+ const bool finalizable_;
+ QuickEntrypointEnum entrypoint_;
DISALLOW_COPY_AND_ASSIGN(HNewInstance);
};
@@ -4237,9 +4339,13 @@ class HPhi : public HInstruction {
: HInstruction(SideEffects::None(), dex_pc),
inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
reg_number_(reg_number),
- type_(type),
- is_live_(false),
+ type_(ToPhiType(type)),
+ // Phis are constructed live and marked dead if conflicting or unused.
+ // Individual steps of SsaBuilder should assume that if a phi has been
+ // marked dead, it can be ignored and will be removed by SsaPhiElimination.
+ is_live_(true),
can_be_null_(true) {
+ DCHECK_NE(type_, Primitive::kPrimVoid);
}
// Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
@@ -4710,13 +4816,15 @@ class HLoadClass : public HExpression<1> {
const DexFile& dex_file,
bool is_referrers_class,
uint32_t dex_pc,
- bool needs_access_check)
+ bool needs_access_check,
+ bool is_in_dex_cache)
: HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
type_index_(type_index),
dex_file_(dex_file),
is_referrers_class_(is_referrers_class),
generate_clinit_check_(false),
needs_access_check_(needs_access_check),
+ is_in_dex_cache_(is_in_dex_cache),
loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
// Referrers class should not need access check. We never inline unverified
// methods so we can't possibly end up in this situation.
@@ -4741,14 +4849,13 @@ class HLoadClass : public HExpression<1> {
bool CanBeNull() const OVERRIDE { return false; }
bool NeedsEnvironment() const OVERRIDE {
- // Will call runtime and load the class if the class is not loaded yet.
- // TODO: finer grain decision.
- return !is_referrers_class_;
+ return CanCallRuntime();
}
bool MustGenerateClinitCheck() const {
return generate_clinit_check_;
}
+
void SetMustGenerateClinitCheck(bool generate_clinit_check) {
// The entrypoint the code generator is going to call does not do
// clinit of the class.
@@ -4757,7 +4864,9 @@ class HLoadClass : public HExpression<1> {
}
bool CanCallRuntime() const {
- return MustGenerateClinitCheck() || !is_referrers_class_ || needs_access_check_;
+ return MustGenerateClinitCheck() ||
+ (!is_referrers_class_ && !is_in_dex_cache_) ||
+ needs_access_check_;
}
bool NeedsAccessCheck() const {
@@ -4765,8 +4874,6 @@ class HLoadClass : public HExpression<1> {
}
bool CanThrow() const OVERRIDE {
- // May call runtime and and therefore can throw.
- // TODO: finer grain decision.
return CanCallRuntime();
}
@@ -4788,6 +4895,8 @@ class HLoadClass : public HExpression<1> {
return SideEffects::CanTriggerGC();
}
+ bool IsInDexCache() const { return is_in_dex_cache_; }
+
DECLARE_INSTRUCTION(LoadClass);
private:
@@ -4797,7 +4906,8 @@ class HLoadClass : public HExpression<1> {
// Whether this instruction must generate the initialization check.
// Used for code generation.
bool generate_clinit_check_;
- bool needs_access_check_;
+ const bool needs_access_check_;
+ const bool is_in_dex_cache_;
ReferenceTypeInfo loaded_class_rti_;
@@ -4862,6 +4972,7 @@ class HClinitCheck : public HExpression<1> {
return true;
}
+ bool CanThrow() const OVERRIDE { return true; }
HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
@@ -5474,6 +5585,9 @@ class HParallelMove : public HTemplateInstruction<0> {
} // namespace art
+#ifdef ART_ENABLE_CODEGEN_arm
+#include "nodes_arm.h"
+#endif
#ifdef ART_ENABLE_CODEGEN_arm64
#include "nodes_arm64.h"
#endif