Diffstat (limited to 'compiler/optimizing/nodes.h')
-rw-r--r--  compiler/optimizing/nodes.h  69
1 file changed, 62 insertions, 7 deletions
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index eebc49c991..e3f4d8f035 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -333,7 +333,8 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
cached_current_method_(nullptr),
inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
- osr_(osr) {
+ osr_(osr),
+ cha_single_implementation_list_(arena->Adapter(kArenaAllocCHA)) {
blocks_.reserve(kDefaultNumberOfBlocks);
}
@@ -536,6 +537,20 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
bool IsCompilingOsr() const { return osr_; }
+ ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
+ return cha_single_implementation_list_;
+ }
+
+ void AddCHASingleImplementationDependency(ArtMethod* method) {
+ cha_single_implementation_list_.insert(method);
+ }
+
+ bool HasShouldDeoptimizeFlag() const {
+ // TODO: if all CHA guards can be eliminated, there is no need for the flag
+ // even if cha_single_implementation_list_ is not empty.
+ return !cha_single_implementation_list_.empty();
+ }
+
bool HasTryCatch() const { return has_try_catch_; }
void SetHasTryCatch(bool value) { has_try_catch_ = value; }
@@ -672,6 +687,9 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
// compiled code entries which the interpreter can directly jump to.
const bool osr_;
+ // List of methods that are assumed to have a single implementation.
+ ArenaSet<ArtMethod*> cha_single_implementation_list_;
+
friend class SsaBuilder; // For caching constants.
friend class SsaLivenessAnalysis; // For the linear order.
friend class HInliner; // For the reverse post order.
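
For context, a hedged sketch of how the new HGraph API above is meant to be used by a devirtualizing pass. The helper name DevirtualizeWithCHA is illustrative only and not part of this change; the point is the pairing of AddCHASingleImplementationDependency() with HasShouldDeoptimizeFlag().

// Hypothetical helper, not part of this change: recording a CHA dependency
// when a call is devirtualized based on a single-implementation assumption.
void DevirtualizeWithCHA(HGraph* graph,
                         HInvokeVirtual* invoke,
                         ArtMethod* single_implementation) {
  // Record the assumption so the runtime can invalidate this compiled method
  // if the class hierarchy later gains another implementation.
  graph->AddCHASingleImplementationDependency(single_implementation);
  // Once at least one dependency exists, HasShouldDeoptimizeFlag() returns
  // true and the code generator reserves the 4-byte stack slot for the flag.
  DCHECK(graph->HasShouldDeoptimizeFlag());
  // ... replace the virtual dispatch with a direct call guarded by a
  // HShouldDeoptimizeFlag check (see HShouldDeoptimizeFlag below) ...
}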
@@ -1240,6 +1258,7 @@ class HLoopInformationOutwardIterator : public ValueObject {
M(ClinitCheck, Instruction) \
M(Compare, BinaryOperation) \
M(CurrentMethod, Instruction) \
+ M(ShouldDeoptimizeFlag, Instruction) \
M(Deoptimize, Instruction) \
M(Div, BinaryOperation) \
M(DivZeroCheck, Instruction) \
@@ -2072,6 +2091,8 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
#undef INSTRUCTION_TYPE_CHECK
// Returns whether the instruction can be moved within the graph.
+ // TODO: this method is used by LICM and GVN with possibly different
+ // meanings? split and rename?
virtual bool CanBeMoved() const { return false; }
// Returns whether the two instructions are of the same kind.
@@ -2873,6 +2894,27 @@ class HDeoptimize FINAL : public HTemplateInstruction<1> {
DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
};
+// Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
+// The compiled code checks this flag value in a guard before a devirtualized call and,
+// if it is true, deoptimizes.
+// The flag occupies a 4-byte slot on the stack.
+// TODO: allocate a register for this flag.
+class HShouldDeoptimizeFlag FINAL : public HExpression<0> {
+ public:
+ // TODO: use SideEffects to aid eliminating some CHA guards.
+ explicit HShouldDeoptimizeFlag(uint32_t dex_pc)
+ : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
+ }
+
+ // We don't eliminate CHA guards yet.
+ bool CanBeMoved() const OVERRIDE { return false; }
+
+ DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(HShouldDeoptimizeFlag);
+};
+
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
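
A minimal sketch of how a CHA guard could be assembled from the new instruction, assuming the usual node constructors (HNotEqual, HDeoptimize) and the standard arena allocation pattern. The real guard construction lives in the inliner and may differ in detail; environment setup for the deoptimization is omitted here.

// Illustrative only: build "if (should_deoptimize != 0) deoptimize" before
// a devirtualized call site at `cursor`.
void InsertCHAGuard(HGraph* graph, HInstruction* cursor, uint32_t dex_pc) {
  ArenaAllocator* arena = graph->GetArena();
  // Materialize the per-frame should_deoptimize flag (4-byte stack slot).
  HShouldDeoptimizeFlag* flag = new (arena) HShouldDeoptimizeFlag(dex_pc);
  cursor->GetBlock()->InsertInstructionBefore(flag, cursor);
  // A non-zero flag means the single-implementation assumption was
  // invalidated by later class loading: fall back to the interpreter.
  HNotEqual* compare =
      new (arena) HNotEqual(flag, graph->GetIntConstant(0), dex_pc);
  cursor->GetBlock()->InsertInstructionBefore(compare, cursor);
  HDeoptimize* deopt = new (arena) HDeoptimize(compare, dex_pc);
  cursor->GetBlock()->InsertInstructionBefore(deopt, cursor);
  // Note: a real guard must also give the HDeoptimize an environment
  // (omitted in this sketch).
}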
@@ -3789,7 +3831,7 @@ class HInvoke : public HInstruction {
bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }
- bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }
+ bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); }
bool InstructionDataEquals(const HInstruction* other) const OVERRIDE {
return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
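
Why the extra DoesAnyWrite() check matters: GVN treats movable instructions as pure values and reuses an earlier identical computation. A simplified, hypothetical fragment of that idea follows (the real pass uses a hash-based ValueSet in gvn.cc); with the stricter predicate, an intrinsic that writes memory, such as StringBuilder.append, is never deduplicated.

// Hypothetical, simplified value-numbering step.
HInstruction* TryDeduplicate(HInstruction* instruction,
                             ArenaVector<HInstruction*>* seen) {
  if (!instruction->CanBeMoved()) {
    return nullptr;  // e.g. an intrinsic with write side effects.
  }
  for (HInstruction* candidate : *seen) {
    // HInstruction::Equals compares kind, inputs and instruction data.
    if (candidate->Equals(instruction)) {
      return candidate;  // Reuse the earlier, identical computation.
    }
  }
  seen->push_back(instruction);
  return nullptr;
}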
@@ -4181,6 +4223,19 @@ class HInvokeVirtual FINAL : public HInvoke {
kVirtual),
vtable_index_(vtable_index) {}
+ bool CanBeNull() const OVERRIDE {
+ switch (GetIntrinsic()) {
+ case Intrinsics::kThreadCurrentThread:
+ case Intrinsics::kStringBufferAppend:
+ case Intrinsics::kStringBufferToString:
+ case Intrinsics::kStringBuilderAppend:
+ case Intrinsics::kStringBuilderToString:
+ return false;
+ default:
+ return HInvoke::CanBeNull();
+ }
+ }
+
bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
// TODO: Add implicit null checks in intrinsics.
return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
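
The intrinsics listed above never return null: Thread.currentThread() returns the running thread, the StringBuffer/StringBuilder append variants return their receiver, and toString() allocates a fresh String. A hedged sketch of how a simplification pass can exploit CanBeNull(); the helper name is hypothetical, though the ReplaceWith/RemoveInstruction pattern is the usual one.

// Illustrative: drop a null check whose input provably cannot be null.
void MaybeRemoveNullCheck(HNullCheck* null_check) {
  HInstruction* value = null_check->InputAt(0);
  if (!value->CanBeNull()) {
    // The check can never fail; uses of the check see the value directly.
    null_check->ReplaceWith(value);
    null_check->GetBlock()->RemoveInstruction(null_check);
  }
}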
@@ -5698,7 +5753,7 @@ class HLoadString FINAL : public HInstruction {
};
HLoadString(HCurrentMethod* current_method,
- uint32_t string_index,
+ dex::StringIndex string_index,
const DexFile& dex_file,
uint32_t dex_pc)
: HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
@@ -5717,7 +5772,7 @@ class HLoadString FINAL : public HInstruction {
void SetLoadKindWithStringReference(LoadKind load_kind,
const DexFile& dex_file,
- uint32_t string_index) {
+ dex::StringIndex string_index) {
DCHECK(HasStringReference(load_kind));
load_data_.dex_file_ = &dex_file;
string_index_ = string_index;
@@ -5730,7 +5785,7 @@ class HLoadString FINAL : public HInstruction {
const DexFile& GetDexFile() const;
- uint32_t GetStringIndex() const {
+ dex::StringIndex GetStringIndex() const {
DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
return string_index_;
}
@@ -5744,7 +5799,7 @@ class HLoadString FINAL : public HInstruction {
bool InstructionDataEquals(const HInstruction* other) const OVERRIDE;
- size_t ComputeHashCode() const OVERRIDE { return string_index_; }
+ size_t ComputeHashCode() const OVERRIDE { return string_index_.index_; }
// Will call the runtime if we need to load the string through
// the dex cache and the string is not guaranteed to be there yet.
@@ -5823,7 +5878,7 @@ class HLoadString FINAL : public HInstruction {
// String index serves also as the hash code and it's also needed for slow-paths,
// so it must not be overwritten with other load data.
- uint32_t string_index_;
+ dex::StringIndex string_index_;
union {
const DexFile* dex_file_; // For string reference.
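
The remaining hunks replace the raw uint32_t string index with the strongly typed dex::StringIndex, which is why ComputeHashCode() now reads string_index_.index_. An approximate sketch of such a typed index wrapper, assuming a definition along the lines of dex_file_types.h (details may differ from the real header):

#include <cstdint>
#include <limits>

namespace dex {

// Wraps a raw dex string index so it cannot be silently mixed up with
// other uint32_t values (type indices, method indices, ...).
class StringIndex {
 public:
  uint32_t index_;

  constexpr StringIndex() : index_(std::numeric_limits<uint32_t>::max()) {}
  explicit constexpr StringIndex(uint32_t index) : index_(index) {}

  bool IsValid() const { return index_ != std::numeric_limits<uint32_t>::max(); }

  bool operator==(const StringIndex& other) const { return index_ == other.index_; }
  bool operator<(const StringIndex& other) const { return index_ < other.index_; }
};

}  // namespace dex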