commit    a1de9188a05afdecca8cd04ecc4fefbac8b9880f (patch)
author    2016-02-25 11:37:38 +0000
committer 2016-02-26 16:01:59 +0000
tree      a671c8aef814ccf194e5c3950a551f2711516c53 /compiler/optimizing
parent    950d063395c7cecbbe372fd607468018d661a35c (diff)
Optimizing: Reduce memory usage of HInstructions.
Pack narrow fields and flags into a single 32-bit field.
Change-Id: Ib2f7abf987caee0339018d21f0d498f8db63542d
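The whole commit is one technique applied everywhere: every boolean and small-enum member of HInstruction and its subclasses moves into bit positions of a single uint32_t word (packed_fields_). Below is a minimal, self-contained sketch of that scheme; the names echo the diff, but the code is an illustration of the idea, not ART's actual helpers.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Illustrative stand-in for an HInstruction-style node: narrow state
    // lives in one 32-bit word instead of separate bool members.
    class Node {
     public:
      bool IsEmittedAtUseSite() const { return GetFlag<kFlagEmittedAtUseSite>(); }
      void MarkEmittedAtUseSite() { SetFlag<kFlagEmittedAtUseSite>(true); }

     protected:
      static constexpr size_t kFlagEmittedAtUseSite = 0u;
      static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1u;
      static constexpr size_t kNumberOfGenericBits = kFlagReferenceTypeIsExact + 1u;

      template <size_t kFlag>
      bool GetFlag() const { return (packed_fields_ & (1u << kFlag)) != 0u; }

      template <size_t kFlag>
      void SetFlag(bool value = true) {
        packed_fields_ = (packed_fields_ & ~(1u << kFlag)) | ((value ? 1u : 0u) << kFlag);
      }

     private:
      uint32_t packed_fields_ = 0u;  // Replaces several bool members.
    };

    int main() {
      Node n;
      assert(!n.IsEmittedAtUseSite());
      n.MarkEmittedAtUseSite();
      assert(n.IsEmittedAtUseSite());
    }

Subclasses then claim further bits starting at the base class's bit count, which is how the nodes.h changes below stay within the 32-bit budget.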
Diffstat (limited to 'compiler/optimizing')
 compiler/optimizing/builder.cc               |    8
 compiler/optimizing/code_generator_arm.cc    |    2
 compiler/optimizing/code_generator_arm64.cc  |    2
 compiler/optimizing/code_generator_mips.cc   |    2
 compiler/optimizing/code_generator_x86.cc    |    2
 compiler/optimizing/code_generator_x86_64.cc |    2
 compiler/optimizing/inliner.cc               |    3
 compiler/optimizing/nodes.cc                 |   11
 compiler/optimizing/nodes.h                  |  624
 9 files changed, 426 insertions(+), 230 deletions(-)
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 35ec7d41ff..57660c2623 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -282,7 +282,7 @@ void HGraphBuilder::InsertTryBoundaryBlocks(const DexFile::CodeItem& code_item)
       // Found a predecessor not covered by the same TryItem. Insert entering
       // boundary block.
       HTryBoundary* try_entry =
-          new (arena_) HTryBoundary(HTryBoundary::kEntry, try_block->GetDexPc());
+          new (arena_) HTryBoundary(HTryBoundary::BoundaryKind::kEntry, try_block->GetDexPc());
       try_block->CreateImmediateDominator()->AddInstruction(try_entry);
       LinkToCatchBlocks(try_entry, code_item, entry.second);
       break;
@@ -316,7 +316,7 @@ void HGraphBuilder::InsertTryBoundaryBlocks(const DexFile::CodeItem& code_item)
       // Insert TryBoundary and link to catch blocks.
       HTryBoundary* try_exit =
-          new (arena_) HTryBoundary(HTryBoundary::kExit, successor->GetDexPc());
+          new (arena_) HTryBoundary(HTryBoundary::BoundaryKind::kExit, successor->GetDexPc());
       graph_->SplitEdge(try_block, successor)->AddInstruction(try_exit);
       LinkToCatchBlocks(try_exit, code_item, entry.second);
     }
@@ -2843,7 +2843,7 @@ bool HGraphBuilder::AnalyzeDexInstruction(const Instruction& instruction, uint32
     case Instruction::MONITOR_ENTER: {
       current_block_->AddInstruction(new (arena_) HMonitorOperation(
           LoadLocal(instruction.VRegA_11x(), Primitive::kPrimNot, dex_pc),
-          HMonitorOperation::kEnter,
+          HMonitorOperation::OperationKind::kEnter,
           dex_pc));
       break;
     }
@@ -2851,7 +2851,7 @@ bool HGraphBuilder::AnalyzeDexInstruction(const Instruction& instruction, uint32
     case Instruction::MONITOR_EXIT: {
       current_block_->AddInstruction(new (arena_) HMonitorOperation(
          LoadLocal(instruction.VRegA_11x(), Primitive::kPrimNot, dex_pc),
-          HMonitorOperation::kExit,
+          HMonitorOperation::OperationKind::kExit,
          dex_pc));
       break;
     }
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 10d3426a58..aa9b01f30b 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -6594,7 +6594,7 @@ void LocationsBuilderARM::VisitClassTableGet(HClassTableGet* instruction) {
 void InstructionCodeGeneratorARM::VisitClassTableGet(HClassTableGet* instruction) {
   LocationSummary* locations = instruction->GetLocations();
   uint32_t method_offset = 0;
-  if (instruction->GetTableKind() == HClassTableGet::kVTable) {
+  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
     method_offset = mirror::Class::EmbeddedVTableEntryOffset(
         instruction->GetIndex(), kArmPointerSize).SizeValue();
   } else {
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index beb75f0afc..c27209f0b1 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -5030,7 +5030,7 @@ void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
 void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
   LocationSummary* locations = instruction->GetLocations();
   uint32_t method_offset = 0;
-  if (instruction->GetTableKind() == HClassTableGet::kVTable) {
+  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
     method_offset = mirror::Class::EmbeddedVTableEntryOffset(
         instruction->GetIndex(), kArm64PointerSize).SizeValue();
   } else {
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 8d3d94b79d..f3c12efd8d 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -5245,7 +5245,7 @@ void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
 void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
   LocationSummary* locations = instruction->GetLocations();
   uint32_t method_offset = 0;
-  if (instruction->GetTableKind() == HClassTableGet::kVTable) {
+  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
     method_offset = mirror::Class::EmbeddedVTableEntryOffset(
         instruction->GetIndex(), kMipsPointerSize).SizeValue();
   } else {
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 88e42f3faf..6b4a18c688 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -4127,7 +4127,7 @@ void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
 void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
   LocationSummary* locations = instruction->GetLocations();
   uint32_t method_offset = 0;
-  if (instruction->GetTableKind() == HClassTableGet::kVTable) {
+  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
     method_offset = mirror::Class::EmbeddedVTableEntryOffset(
         instruction->GetIndex(), kX86PointerSize).SizeValue();
   } else {
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index bb24c6f59c..c132663016 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -3998,7 +3998,7 @@ void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
 void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
   LocationSummary* locations = instruction->GetLocations();
   uint32_t method_offset = 0;
-  if (instruction->GetTableKind() == HClassTableGet::kVTable) {
+  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
     method_offset = mirror::Class::EmbeddedVTableEntryOffset(
         instruction->GetIndex(), kX86_64PointerSize).SizeValue();
   } else {
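All six code-generator hunks above are the same mechanical change: the kind enums in nodes.h become scoped enums ("enum class"), so every call site must now qualify the enumerator with the enum's name. A small sketch of why, using hypothetical Old/New types rather than the real classes:

    // Unscoped enumerators leak into the enclosing class scope, so callers
    // could write Old::kVTable. Scoped enumerators stay inside the enum, so
    // callers must write New::TableKind::kVTable, as the diff does.
    struct Old { enum TableKind { kVTable, kIMTable }; };
    struct New { enum class TableKind { kVTable, kIMTable, kLast = kIMTable }; };

    int main() {
      Old::TableKind a = Old::kVTable;             // enumerator visible in class scope
      New::TableKind b = New::TableKind::kVTable;  // must name the enum type
      (void)a; (void)b;
    }

The scoped form also gets a kLast sentinel, which the nodes.h changes below use to size each bit field at compile time.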
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 02a1acc240..d55009554f 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -651,7 +651,8 @@ bool HInliner::TryInlinePolymorphicCallToSameTarget(HInvoke* invoke_instruction,
   HClassTableGet* class_table_get = new (graph_->GetArena()) HClassTableGet(
       receiver_class,
       type,
-      invoke_instruction->IsInvokeVirtual() ? HClassTableGet::kVTable : HClassTableGet::kIMTable,
+      invoke_instruction->IsInvokeVirtual() ? HClassTableGet::TableKind::kVTable
+                                            : HClassTableGet::TableKind::kIMTable,
       method_offset,
       invoke_instruction->GetDexPc());
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index f9acb089ee..27a5b97f5f 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2206,7 +2206,8 @@ void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
       CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
     }
   }
-  reference_type_info_ = rti;
+  reference_type_handle_ = rti.GetTypeHandle();
+  SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
 }

 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
@@ -2217,17 +2218,15 @@ void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be
     CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
   }
   upper_bound_ = upper_bound;
-  upper_can_be_null_ = can_be_null;
+  SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
 }

-ReferenceTypeInfo::ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
-
-ReferenceTypeInfo::ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
-    : type_handle_(type_handle), is_exact_(is_exact) {
+ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) {
   if (kIsDebugBuild) {
     ScopedObjectAccess soa(Thread::Current());
     DCHECK(IsValidHandle(type_handle));
   }
+  return ReferenceTypeInfo(type_handle, is_exact);
 }

 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 4185b2f2c5..174b29a806 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -154,8 +154,9 @@ class ReferenceTypeInfo : ValueObject {
  public:
   typedef Handle<mirror::Class> TypeHandle;

-  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact) {
-    // The constructor will check that the type_handle is valid.
+  static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
+
+  static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
     return ReferenceTypeInfo(type_handle, is_exact);
   }

@@ -254,8 +255,9 @@ class ReferenceTypeInfo : ValueObject {
   }

  private:
-  ReferenceTypeInfo();
-  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact);
+  ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
+  ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
+      : type_handle_(type_handle), is_exact_(is_exact) { }

   // The class of the object.
   TypeHandle type_handle_;
@@ -1847,13 +1849,15 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
         dex_pc_(dex_pc),
         id_(-1),
         ssa_index_(-1),
-        emitted_at_use_site_(false),
+        packed_fields_(0u),
         environment_(nullptr),
         locations_(nullptr),
         live_interval_(nullptr),
         lifetime_position_(kNoLifetime),
         side_effects_(side_effects),
-        reference_type_info_(ReferenceTypeInfo::CreateInvalid()) {}
+        reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
+    SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
+  }

   virtual ~HInstruction() {}
@@ -1922,7 +1926,8 @@
   ReferenceTypeInfo GetReferenceTypeInfo() const {
     DCHECK_EQ(GetType(), Primitive::kPrimNot);
-    return reference_type_info_;
+    return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
+                                              GetPackedFlag<kFlagReferenceTypeIsExact>());
   }

   void AddUseAt(HInstruction* user, size_t index) {
@@ -2111,13 +2116,45 @@
   // The caller must ensure that this is safe to do.
   void RemoveEnvironmentUsers();

-  bool IsEmittedAtUseSite() const { return emitted_at_use_site_; }
-  void MarkEmittedAtUseSite() { emitted_at_use_site_ = true; }
+  bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
+  void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }

  protected:
+  // If set, the machine code for this instruction is assumed to be generated by
+  // its users. Used by liveness analysis to compute use positions accordingly.
+  static constexpr size_t kFlagEmittedAtUseSite = 0u;
+  static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
+  static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
+  static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
+
   virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
   virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;

+  uint32_t GetPackedFields() const {
+    return packed_fields_;
+  }
+
+  template <size_t flag>
+  bool GetPackedFlag() const {
+    return (packed_fields_ & (1u << flag)) != 0u;
+  }
+
+  template <size_t flag>
+  void SetPackedFlag(bool value = true) {
+    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
+  }
+
+  template <typename BitFieldType>
+  typename BitFieldType::value_type GetPackedField() const {
+    return BitFieldType::Decode(packed_fields_);
+  }
+
+  template <typename BitFieldType>
+  void SetPackedField(typename BitFieldType::value_type value) {
+    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
+    packed_fields_ = BitFieldType::Update(value, packed_fields_);
+  }
+
  private:
   void RemoveEnvironmentUser(HUseListNode<HEnvironment*>* use_node) { env_uses_.Remove(use_node); }
@@ -2134,9 +2171,8 @@
   // When doing liveness analysis, instructions that have uses get an SSA index.
   int ssa_index_;

-  // If set, the machine code for this instruction is assumed to be generated by
-  // its users. Used by liveness analysis to compute use positions accordingly.
-  bool emitted_at_use_site_;
+  // Packed fields.
+  uint32_t packed_fields_;

   // List of instructions that have this instruction as input.
   HUseList<HInstruction*> uses_;
@@ -2160,8 +2196,10 @@
   SideEffects side_effects_;

+  // The reference handle part of the reference type info.
+  // The IsExact() flag is stored in packed fields.
   // TODO: for primitive types this should be marked as invalid.
-  ReferenceTypeInfo reference_type_info_;
+  ReferenceTypeInfo::TypeHandle reference_type_handle_;

   friend class GraphChecker;
   friend class HBasicBlock;
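The GetPackedField/SetPackedField helpers added above delegate to a BitField<T, position, size> utility (ART keeps its version in base/bit_field.h). The following rough stand-in shows only the Decode/Update contract those helpers rely on; the Bias enum and the chosen bit position are invented for the example:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Minimal sketch of a BitField helper: a typed view of `kSize` bits
    // starting at `kPosition` inside a 32-bit storage word.
    template <typename T, size_t kPosition, size_t kSize>
    struct BitField {
      using value_type = T;
      static constexpr uint32_t Mask() { return (1u << kSize) - 1u; }
      static T Decode(uint32_t storage) {
        return static_cast<T>((storage >> kPosition) & Mask());
      }
      static uint32_t Update(T value, uint32_t storage) {
        return (storage & ~(Mask() << kPosition)) |
               ((static_cast<uint32_t>(value) & Mask()) << kPosition);
      }
    };

    enum class Bias { kNoBias, kGtBias, kLtBias, kLast = kLtBias };
    using BiasField = BitField<Bias, /* position */ 2, /* size */ 2>;

    int main() {
      uint32_t packed = 0u;
      packed = BiasField::Update(Bias::kGtBias, packed);
      assert(BiasField::Decode(packed) == Bias::kGtBias);
    }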
@@ -2287,13 +2325,23 @@
 template<intptr_t N>
 class HExpression : public HTemplateInstruction<N> {
  public:
   HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
-      : HTemplateInstruction<N>(side_effects, dex_pc), type_(type) {}
+      : HTemplateInstruction<N>(side_effects, dex_pc) {
+    this->template SetPackedField<TypeField>(type);
+  }
   virtual ~HExpression() {}

-  Primitive::Type GetType() const OVERRIDE { return type_; }
+  Primitive::Type GetType() const OVERRIDE {
+    return TypeField::Decode(this->GetPackedFields());
+  }

  protected:
-  Primitive::Type type_;
+  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
+  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
+  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
 };

 // Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
@@ -2583,13 +2631,16 @@ class HIf : public HTemplateInstruction<1> {
 // higher indices in no particular order.
 class HTryBoundary : public HTemplateInstruction<0> {
  public:
-  enum BoundaryKind {
+  enum class BoundaryKind {
     kEntry,
     kExit,
+    kLast = kExit
   };

   explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
-      : HTemplateInstruction(SideEffects::None(), dex_pc), kind_(kind) {}
+      : HTemplateInstruction(SideEffects::None(), dex_pc) {
+    SetPackedField<BoundaryKindField>(kind);
+  }

   bool IsControlFlow() const OVERRIDE { return true; }
@@ -2615,14 +2666,22 @@
     }
   }

-  bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }
+  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
+  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }

   bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;

   DECLARE_INSTRUCTION(TryBoundary);

  private:
-  const BoundaryKind kind_;
+  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldBoundaryKindSize =
+      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
+  static constexpr size_t kNumberOfTryBoundaryPackedBits =
+      kFieldBoundaryKind + kFieldBoundaryKindSize;
+  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;

   DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
 };
@@ -2668,9 +2727,10 @@ class HCurrentMethod : public HExpression<0> {
 // of a class.
 class HClassTableGet : public HExpression<1> {
  public:
-  enum TableKind {
+  enum class TableKind {
     kVTable,
     kIMTable,
+    kLast = kIMTable
   };
   HClassTableGet(HInstruction* cls,
                  Primitive::Type type,
@@ -2678,26 +2738,33 @@
                  size_t index,
                  uint32_t dex_pc)
       : HExpression(type, SideEffects::None(), dex_pc),
-        index_(index),
-        table_kind_(kind) {
+        index_(index) {
+    SetPackedField<TableKindField>(kind);
     SetRawInputAt(0, cls);
   }

   bool CanBeMoved() const OVERRIDE { return true; }
   bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
     return other->AsClassTableGet()->GetIndex() == index_ &&
-        other->AsClassTableGet()->GetTableKind() == table_kind_;
+        other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
   }

-  TableKind GetTableKind() const { return table_kind_; }
+  TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
   size_t GetIndex() const { return index_; }

   DECLARE_INSTRUCTION(ClassTableGet);

  private:
+  static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFieldTableKindSize =
+      MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
+  static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
+  static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
+
   // The index of the ArtMethod in the table.
   const size_t index_;
-  const TableKind table_kind_;

   DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
 };
@@ -2864,6 +2931,7 @@ enum class ComparisonBias {
   kNoBias,  // bias is not applicable (i.e. for long operation)
   kGtBias,  // return 1 for NaN comparisons
   kLtBias,  // return -1 for NaN comparisons
+  kLast = kLtBias
 };

 std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
@@ -2871,8 +2939,9 @@
 class HCondition : public HBinaryOperation {
  public:
   HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
-      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc),
-        bias_(ComparisonBias::kNoBias) {}
+      : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
+    SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
+  }

   // For code generation purposes, returns whether this instruction is just before
   // `instruction`, and disregard moves in between.
@@ -2884,12 +2953,12 @@
   virtual IfCondition GetOppositeCondition() const = 0;

-  bool IsGtBias() const { return bias_ == ComparisonBias::kGtBias; }
-  ComparisonBias GetBias() const { return bias_; }
-  void SetBias(ComparisonBias bias) { bias_ = bias; }
+  bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
+  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
+  void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }

   bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
-    return bias_ == other->AsCondition()->bias_;
+    return GetPackedFields() == other->AsCondition()->GetPackedFields();
   }

   bool IsFPConditionTrueIfNaN() const {
@@ -2905,6 +2974,16 @@
   }

  protected:
+  // Needed if we merge a HCompare into a HCondition.
+  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFieldComparisonBiasSize =
+      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
+  static constexpr size_t kNumberOfConditionPackedBits =
+      kFieldComparisonBias + kFieldComparisonBiasSize;
+  static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using ComparisonBiasField =
+      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
+
   template <typename T>
   int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
@@ -2922,9 +3001,6 @@
   }

  private:
-  // Needed if we merge a HCompare into a HCondition.
-  ComparisonBias bias_;
-
   DISALLOW_COPY_AND_ASSIGN(HCondition);
 };
@@ -3337,8 +3413,8 @@ class HCompare : public HBinaryOperation {
                          first,
                          second,
                          SideEffectsForArchRuntimeCalls(type),
-                         dex_pc),
-        bias_(bias) {
+                         dex_pc) {
+    SetPackedField<ComparisonBiasField>(bias);
     DCHECK_EQ(type, first->GetType());
     DCHECK_EQ(type, second->GetType());
   }
@@ -3373,16 +3449,16 @@
   }

   bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
-    return bias_ == other->AsCompare()->bias_;
+    return GetPackedFields() == other->AsCompare()->GetPackedFields();
   }

-  ComparisonBias GetBias() const { return bias_; }
+  ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }

   // Does this compare instruction have a "gt bias" (vs an "lt bias")?
-  // Only meaninfgul for floating-point comparisons.
+  // Only meaningful for floating-point comparisons.
   bool IsGtBias() const {
     DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
-    return bias_ == ComparisonBias::kGtBias;
+    return GetBias() == ComparisonBias::kGtBias;
   }

   static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type) {
@@ -3393,6 +3469,15 @@
   DECLARE_INSTRUCTION(Compare);

  protected:
+  static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFieldComparisonBiasSize =
+      MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
+  static constexpr size_t kNumberOfComparePackedBits =
+      kFieldComparisonBias + kFieldComparisonBiasSize;
+  static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using ComparisonBiasField =
+      BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
+
   // Return an integer constant containing the result of a comparison evaluated at compile time.
   HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
     DCHECK(value == -1 || value == 0 || value == 1) << value;
@@ -3400,8 +3485,6 @@
   }

  private:
-  const ComparisonBias bias_;
-
   DISALLOW_COPY_AND_ASSIGN(HCompare);
 };
@@ -3469,9 +3552,9 @@ class HNewInstance : public HExpression<2> {
       : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
         type_index_(type_index),
         dex_file_(dex_file),
-        can_throw_(can_throw),
-        finalizable_(finalizable),
         entrypoint_(entrypoint) {
+    SetPackedFlag<kFlagCanThrow>(can_throw);
+    SetPackedFlag<kFlagFinalizable>(finalizable);
     SetRawInputAt(0, cls);
     SetRawInputAt(1, current_method);
   }
@@ -3485,9 +3568,9 @@
   // It may throw when called on type that's not instantiable/accessible.
   // It can throw OOME.
   // TODO: distinguish between the two cases so we can for example allow allocation elimination.
-  bool CanThrow() const OVERRIDE { return can_throw_ || true; }
+  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>() || true; }

-  bool IsFinalizable() const { return finalizable_; }
+  bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }

   bool CanBeNull() const OVERRIDE { return false; }
@@ -3502,10 +3585,14 @@
   DECLARE_INSTRUCTION(NewInstance);

  private:
+  static constexpr size_t kFlagCanThrow = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFlagFinalizable = kFlagCanThrow + 1;
+  static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
+  static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+
   const uint16_t type_index_;
   const DexFile& dex_file_;
-  const bool can_throw_;
-  const bool finalizable_;
   QuickEntrypointEnum entrypoint_;

   DISALLOW_COPY_AND_ASSIGN(HNewInstance);
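A pattern worth noting in the HNewInstance hunk above, repeated for every class below: each subclass starts its flags at the count of bits its base class already consumed, and a static_assert proves the total still fits in the 32-bit word. An illustrative reduction of that layering follows; the sizes and names are placeholders, not ART's real values:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * 8u;

    struct Base {
      static constexpr size_t kNumberOfGenericPackedBits = 2u;  // e.g. two generic flags
    };

    struct Expression : Base {
      // A multi-bit "type" field starts right after the generic bits.
      static constexpr size_t kFieldType = kNumberOfGenericPackedBits;
      static constexpr size_t kFieldTypeSize = 4u;  // wide enough for the type enum
      static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
    };

    struct NewInstanceLike : Expression {
      // Subclass flags continue where the Expression bits end.
      static constexpr size_t kFlagCanThrow = kNumberOfExpressionPackedBits;
      static constexpr size_t kFlagFinalizable = kFlagCanThrow + 1u;
      static constexpr size_t kNumberOfPackedBits = kFlagFinalizable + 1u;
      static_assert(kNumberOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
    };

    int main() { return NewInstanceLike::kNumberOfPackedBits == 8u ? 0 : 1; }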
@@ -3555,12 +3642,14 @@ class HInvoke : public HInstruction {
   // inputs at the end of their list of inputs.
   uint32_t GetNumberOfArguments() const { return number_of_arguments_; }

-  Primitive::Type GetType() const OVERRIDE { return return_type_; }
+  Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }

   uint32_t GetDexMethodIndex() const { return dex_method_index_; }
   const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }

-  InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }
+  InvokeType GetOriginalInvokeType() const {
+    return GetPackedField<OriginalInvokeTypeField>();
+  }

   Intrinsics GetIntrinsic() const {
     return intrinsic_;
@@ -3575,7 +3664,7 @@
     return GetEnvironment()->IsFromInlinedInvoke();
   }

-  bool CanThrow() const OVERRIDE { return can_throw_; }
+  bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }

   bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }
@@ -3596,6 +3685,20 @@
   DECLARE_ABSTRACT_INSTRUCTION(Invoke);

  protected:
+  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldOriginalInvokeTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
+  static constexpr size_t kFieldReturnType =
+      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
+  static constexpr size_t kFieldReturnTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
+  static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
+  static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
+  static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using OriginalInvokeTypeField =
+      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
+  using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;
+
   HInvoke(ArenaAllocator* arena,
           uint32_t number_of_arguments,
           uint32_t number_of_other_inputs,
@@ -3608,12 +3711,12 @@
         number_of_arguments_(number_of_arguments),
         inputs_(number_of_arguments + number_of_other_inputs,
                 arena->Adapter(kArenaAllocInvokeInputs)),
-        return_type_(return_type),
         dex_method_index_(dex_method_index),
-        original_invoke_type_(original_invoke_type),
-        can_throw_(true),
         intrinsic_(Intrinsics::kNone),
         intrinsic_optimizations_(0) {
+    SetPackedField<ReturnTypeField>(return_type);
+    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
+    SetPackedFlag<kFlagCanThrow>(true);
   }

   const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
@@ -3624,14 +3727,11 @@
     inputs_[index] = input;
   }

-  void SetCanThrow(bool can_throw) { can_throw_ = can_throw; }
+  void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }

   uint32_t number_of_arguments_;
   ArenaVector<HUserRecord<HInstruction*>> inputs_;
-  const Primitive::Type return_type_;
   const uint32_t dex_method_index_;
-  const InvokeType original_invoke_type_;
-  bool can_throw_;
   Intrinsics intrinsic_;

   // A magic word holding optimizations for intrinsics. See intrinsics.h.
@@ -3672,6 +3772,7 @@ class HInvokeStaticOrDirect : public HInvoke {
     kNone,      // Class already initialized.
     kExplicit,  // Static call having explicit clinit check as last input.
     kImplicit,  // Static call implicitly requiring a clinit check.
+    kLast = kImplicit
   };

   // Determines how to load the target ArtMethod*.
@@ -3692,7 +3793,7 @@
     // the image relocatable or not.
     kDirectAddressWithFixup,

-    // Load from resoved methods array in the dex cache using a PC-relative load.
+    // Load from resolved methods array in the dex cache using a PC-relative load.
     // Used when we need to use the dex cache, for example for invoke-static that
     // may cause class initialization (the entry may point to a resolution method),
     // and we know that we can access the dex cache arrays using a PC-relative load.
@@ -3764,10 +3865,11 @@
                 dex_pc,
                 method_index,
                 original_invoke_type),
-        optimized_invoke_type_(optimized_invoke_type),
-        clinit_check_requirement_(clinit_check_requirement),
         target_method_(target_method),
-        dispatch_info_(dispatch_info) { }
+        dispatch_info_(dispatch_info) {
+    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
+    SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
+  }

   void SetDispatchInfo(const DispatchInfo& dispatch_info) {
     bool had_current_method_input = HasCurrentMethodInput();
@@ -3799,7 +3901,7 @@
   }

   bool CanBeNull() const OVERRIDE {
-    return return_type_ == Primitive::kPrimNot && !IsStringInit();
+    return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
   }

   // Get the index of the special input, if any.
@@ -3810,9 +3912,12 @@
   uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
   bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }

-  InvokeType GetOptimizedInvokeType() const { return optimized_invoke_type_; }
+  InvokeType GetOptimizedInvokeType() const {
+    return GetPackedField<OptimizedInvokeTypeField>();
+  }
+
   void SetOptimizedInvokeType(InvokeType invoke_type) {
-    optimized_invoke_type_ = invoke_type;
+    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
   }

   MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
@@ -3859,7 +3964,9 @@
     return dispatch_info_.direct_code_ptr;
   }

-  ClinitCheckRequirement GetClinitCheckRequirement() const { return clinit_check_requirement_; }
+  ClinitCheckRequirement GetClinitCheckRequirement() const {
+    return GetPackedField<ClinitCheckRequirementField>();
+  }

   // Is this instruction a call to a static method?
   bool IsStatic() const {
@@ -3877,7 +3984,7 @@
     DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
     RemoveAsUserOfInput(last_input_index);
     inputs_.pop_back();
-    clinit_check_requirement_ = new_requirement;
+    SetPackedField<ClinitCheckRequirementField>(new_requirement);
     DCHECK(!IsStaticWithExplicitClinitCheck());
   }
@@ -3893,13 +4000,13 @@
   // Is this a call to a static method whose declaring class has an
   // explicit initialization check in the graph?
   bool IsStaticWithExplicitClinitCheck() const {
-    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
+    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
   }

   // Is this a call to a static method whose declaring class has an
   // implicit intialization check requirement?
   bool IsStaticWithImplicitClinitCheck() const {
-    return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
+    return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
   }

   // Does this method load kind need the current method as an input?
@@ -3928,8 +4035,23 @@
   void RemoveInputAt(size_t index);

  private:
-  InvokeType optimized_invoke_type_;
-  ClinitCheckRequirement clinit_check_requirement_;
+  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
+  static constexpr size_t kFieldOptimizedInvokeTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
+  static constexpr size_t kFieldClinitCheckRequirement =
+      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
+  static constexpr size_t kFieldClinitCheckRequirementSize =
+      MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
+  static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
+      kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
+  static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using OptimizedInvokeTypeField =
+      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
+  using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
+                                               kFieldClinitCheckRequirement,
+                                               kFieldClinitCheckRequirementSize>;

   // The target method may refer to different dex file or method index than the original
   // invoke. This happens for sharpened calls and for calls where a method was redeclared
   // in derived class to increase visibility.
@@ -4595,32 +4717,35 @@ class HParameterValue : public HExpression<0> {
       : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
         dex_file_(dex_file),
         type_index_(type_index),
-        index_(index),
-        is_this_(is_this),
-        can_be_null_(!is_this) {}
+        index_(index) {
+    SetPackedFlag<kFlagIsThis>(is_this);
+    SetPackedFlag<kFlagCanBeNull>(!is_this);
+  }

   const DexFile& GetDexFile() const { return dex_file_; }
   uint16_t GetTypeIndex() const { return type_index_; }
   uint8_t GetIndex() const { return index_; }
-  bool IsThis() const { return is_this_; }
+  bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }

-  bool CanBeNull() const OVERRIDE { return can_be_null_; }
-  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }
+  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
+  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

   DECLARE_INSTRUCTION(ParameterValue);

  private:
+  // Whether or not the parameter value corresponds to 'this' argument.
+  static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
+  static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
+  static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+
   const DexFile& dex_file_;
   const uint16_t type_index_;
   // The index of this parameter in the parameters list. Must be less
   // than HGraph::number_of_in_vregs_.
   const uint8_t index_;

-  // Whether or not the parameter value corresponds to 'this' argument.
-  const bool is_this_;
-
-  bool can_be_null_;
-
   DISALLOW_COPY_AND_ASSIGN(HParameterValue);
 };
@@ -4745,14 +4870,14 @@ class HPhi : public HInstruction {
        uint32_t dex_pc = kNoDexPc)
       : HInstruction(SideEffects::None(), dex_pc),
         inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
-        reg_number_(reg_number),
-        type_(ToPhiType(type)),
-        // Phis are constructed live and marked dead if conflicting or unused.
-        // Individual steps of SsaBuilder should assume that if a phi has been
-        // marked dead, it can be ignored and will be removed by SsaPhiElimination.
-        is_live_(true),
-        can_be_null_(true) {
-    DCHECK_NE(type_, Primitive::kPrimVoid);
+        reg_number_(reg_number) {
+    SetPackedField<TypeField>(ToPhiType(type));
+    DCHECK_NE(GetType(), Primitive::kPrimVoid);
+    // Phis are constructed live and marked dead if conflicting or unused.
+    // Individual steps of SsaBuilder should assume that if a phi has been
+    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
+    SetPackedFlag<kFlagIsLive>(true);
+    SetPackedFlag<kFlagCanBeNull>(true);
   }

   // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
@@ -4775,27 +4900,27 @@
   void AddInput(HInstruction* input);
   void RemoveInputAt(size_t index);

-  Primitive::Type GetType() const OVERRIDE { return type_; }
+  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
   void SetType(Primitive::Type new_type) {
     // Make sure that only valid type changes occur. The following are allowed:
     // (1) int  -> float/ref (primitive type propagation),
     // (2) long -> double (primitive type propagation).
-    DCHECK(type_ == new_type ||
-           (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
-           (type_ == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
-           (type_ == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
-    type_ = new_type;
+    DCHECK(GetType() == new_type ||
+           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
+           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
+           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
+    SetPackedField<TypeField>(new_type);
   }

-  bool CanBeNull() const OVERRIDE { return can_be_null_; }
-  void SetCanBeNull(bool can_be_null) { can_be_null_ = can_be_null; }
+  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
+  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }

   uint32_t GetRegNumber() const { return reg_number_; }

-  void SetDead() { is_live_ = false; }
-  void SetLive() { is_live_ = true; }
-  bool IsDead() const { return !is_live_; }
-  bool IsLive() const { return is_live_; }
+  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
+  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
+  bool IsDead() const { return !IsLive(); }
+  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }

   bool IsVRegEquivalentOf(HInstruction* other) const {
     return other != nullptr
@@ -4830,11 +4955,17 @@
   }

  private:
+  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
+  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
+  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
+  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
+  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
+
   ArenaVector<HUserRecord<HInstruction*> > inputs_;
   const uint32_t reg_number_;
-  Primitive::Type type_;
-  bool is_live_;
-  bool can_be_null_;

   DISALLOW_COPY_AND_ASSIGN(HPhi);
 };
@@ -4973,8 +5104,8 @@ class HInstanceFieldSet : public HTemplateInstruction<2> {
                     field_idx,
                     declaring_class_def_index,
                     dex_file,
-                    dex_cache),
-        value_can_be_null_(true) {
+                    dex_cache) {
+    SetPackedFlag<kFlagValueCanBeNull>(true);
     SetRawInputAt(0, object);
     SetRawInputAt(1, value);
   }
@@ -4988,14 +5119,18 @@
   Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
   bool IsVolatile() const { return field_info_.IsVolatile(); }
   HInstruction* GetValue() const { return InputAt(1); }
-  bool GetValueCanBeNull() const { return value_can_be_null_; }
-  void ClearValueCanBeNull() { value_can_be_null_ = false; }
+  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
+  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

   DECLARE_INSTRUCTION(InstanceFieldSet);

  private:
+  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
+  static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
+  static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+
   const FieldInfo field_info_;
-  bool value_can_be_null_;

   DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
 };
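The kLast sentinels added to each enum in this diff exist so that MinimumBitsToStore(kLast) can size the corresponding bit field at compile time. A constexpr illustration of such a function follows; ART's own version lives in base/bit_utils.h, and this recursive form is written only for the example:

    #include <cstddef>

    // Number of bits needed to represent `value`: 0 for 0, otherwise the
    // position of the highest set bit plus one.
    constexpr size_t MinimumBitsToStore(size_t value) {
      return value == 0u ? 0u : 1u + MinimumBitsToStore(value >> 1);
    }

    enum class ComparisonBias { kNoBias, kGtBias, kLtBias, kLast = kLtBias };

    static_assert(MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast)) == 2u,
                  "Three enumerators (max ordinal 2) fit in two bits.");

    int main() {}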
@@ -5064,11 +5199,11 @@ class HArraySet : public HTemplateInstruction<3> {
             SideEffects::ArrayWriteOfType(expected_component_type).Union(
                 SideEffectsForArchRuntimeCalls(value->GetType())).Union(
                     additional_side_effects),
-            dex_pc),
-        expected_component_type_(expected_component_type),
-        needs_type_check_(value->GetType() == Primitive::kPrimNot),
-        value_can_be_null_(true),
-        static_type_of_array_is_object_array_(false) {
+            dex_pc) {
+    SetPackedField<ExpectedComponentTypeField>(expected_component_type);
+    SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
+    SetPackedFlag<kFlagValueCanBeNull>(true);
+    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
     SetRawInputAt(0, array);
     SetRawInputAt(1, index);
     SetRawInputAt(2, value);
@@ -5076,11 +5211,11 @@
   bool NeedsEnvironment() const OVERRIDE {
     // We call a runtime method to throw ArrayStoreException.
-    return needs_type_check_;
+    return NeedsTypeCheck();
   }

   // Can throw ArrayStoreException.
-  bool CanThrow() const OVERRIDE { return needs_type_check_; }
+  bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }

   bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
     // TODO: Same as for ArrayGet.
@@ -5088,20 +5223,22 @@
   }

   void ClearNeedsTypeCheck() {
-    needs_type_check_ = false;
+    SetPackedFlag<kFlagNeedsTypeCheck>(false);
   }

   void ClearValueCanBeNull() {
-    value_can_be_null_ = false;
+    SetPackedFlag<kFlagValueCanBeNull>(false);
   }

   void SetStaticTypeOfArrayIsObjectArray() {
-    static_type_of_array_is_object_array_ = true;
+    SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
   }

-  bool GetValueCanBeNull() const { return value_can_be_null_; }
-  bool NeedsTypeCheck() const { return needs_type_check_; }
-  bool StaticTypeOfArrayIsObjectArray() const { return static_type_of_array_is_object_array_; }
+  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
+  bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
+  bool StaticTypeOfArrayIsObjectArray() const {
+    return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
+  }

   HInstruction* GetArray() const { return InputAt(0); }
   HInstruction* GetIndex() const { return InputAt(1); }
@@ -5115,11 +5252,11 @@
     Primitive::Type value_type = GetValue()->GetType();
     return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
         ? value_type
-        : expected_component_type_;
+        : GetRawExpectedComponentType();
   }

   Primitive::Type GetRawExpectedComponentType() const {
-    return expected_component_type_;
+    return GetPackedField<ExpectedComponentTypeField>();
   }

   static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
@@ -5129,12 +5266,20 @@
   DECLARE_INSTRUCTION(ArraySet);

  private:
-  const Primitive::Type expected_component_type_;
-  bool needs_type_check_;
-  bool value_can_be_null_;
+  static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldExpectedComponentTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
+  static constexpr size_t kFlagNeedsTypeCheck =
+      kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
+  static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
   // Cached information for the reference_type_info_ so that codegen
   // does not need to inspect the static type.
-  bool static_type_of_array_is_object_array_;
+  static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
+  static constexpr size_t kNumberOfArraySetPackedBits =
+      kFlagStaticTypeOfArrayIsObjectArray + 1;
+  static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using ExpectedComponentTypeField =
+      BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;

   DISALLOW_COPY_AND_ASSIGN(HArraySet);
 };
@@ -5244,14 +5389,15 @@ class HLoadClass : public HExpression<1> {
       : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
         type_index_(type_index),
         dex_file_(dex_file),
-        is_referrers_class_(is_referrers_class),
-        generate_clinit_check_(false),
-        needs_access_check_(needs_access_check),
-        is_in_dex_cache_(is_in_dex_cache),
         loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
     // Referrers class should not need access check. We never inline unverified
     // methods so we can't possibly end up in this situation.
-    DCHECK(!is_referrers_class_ || !needs_access_check_);
+    DCHECK(!is_referrers_class || !needs_access_check);
+
+    SetPackedFlag<kFlagIsReferrersClass>(is_referrers_class);
+    SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
+    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
+    SetPackedFlag<kFlagGenerateClInitCheck>(false);
     SetRawInputAt(0, current_method);
   }
@@ -5262,39 +5408,31 @@
     // Whether or not we need to generate the clinit check is processed in
     // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
     return other->AsLoadClass()->type_index_ == type_index_ &&
-        other->AsLoadClass()->needs_access_check_ == needs_access_check_;
+        other->AsLoadClass()->GetPackedFields() == GetPackedFields();
   }

   size_t ComputeHashCode() const OVERRIDE { return type_index_; }

   uint16_t GetTypeIndex() const { return type_index_; }
-  bool IsReferrersClass() const { return is_referrers_class_; }
   bool CanBeNull() const OVERRIDE { return false; }

   bool NeedsEnvironment() const OVERRIDE {
     return CanCallRuntime();
   }

-  bool MustGenerateClinitCheck() const {
-    return generate_clinit_check_;
-  }
-
   void SetMustGenerateClinitCheck(bool generate_clinit_check) {
     // The entrypoint the code generator is going to call does not do
     // clinit of the class.
     DCHECK(!NeedsAccessCheck());
-    generate_clinit_check_ = generate_clinit_check;
+    SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
   }

   bool CanCallRuntime() const {
     return MustGenerateClinitCheck() ||
-           (!is_referrers_class_ && !is_in_dex_cache_) ||
-           needs_access_check_;
+           (!IsReferrersClass() && !IsInDexCache()) ||
+           NeedsAccessCheck();
   }

-  bool NeedsAccessCheck() const {
-    return needs_access_check_;
-  }
-
   bool CanThrow() const OVERRIDE {
     return CanCallRuntime();
@@ -5312,25 +5450,31 @@
   const DexFile& GetDexFile() { return dex_file_; }

-  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !is_referrers_class_; }
+  bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }

   static SideEffects SideEffectsForArchRuntimeCalls() {
     return SideEffects::CanTriggerGC();
   }

-  bool IsInDexCache() const { return is_in_dex_cache_; }
+  bool IsReferrersClass() const { return GetPackedFlag<kFlagIsReferrersClass>(); }
+  bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
+  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
+  bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }

   DECLARE_INSTRUCTION(LoadClass);

  private:
-  const uint16_t type_index_;
-  const DexFile& dex_file_;
-  const bool is_referrers_class_;
+  static constexpr size_t kFlagIsReferrersClass = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFlagNeedsAccessCheck = kFlagIsReferrersClass + 1;
+  static constexpr size_t kFlagIsInDexCache = kFlagNeedsAccessCheck + 1;
   // Whether this instruction must generate the initialization check.
   // Used for code generation.
-  bool generate_clinit_check_;
-  const bool needs_access_check_;
-  const bool is_in_dex_cache_;
+  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
+  static constexpr size_t kNumberOfLoadClassPackedBits = kFlagGenerateClInitCheck + 1;
+  static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
+
+  const uint16_t type_index_;
+  const DexFile& dex_file_;

   ReferenceTypeInfo loaded_class_rti_;
@@ -5344,8 +5488,8 @@ class HLoadString : public HExpression<1> {
               uint32_t dex_pc,
               bool is_in_dex_cache)
       : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
-        string_index_(string_index),
-        is_in_dex_cache_(is_in_dex_cache) {
+        string_index_(string_index) {
+    SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
     SetRawInputAt(0, current_method);
   }
@@ -5364,18 +5508,22 @@
   bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return true; }
   bool CanBeNull() const OVERRIDE { return false; }
-  bool IsInDexCache() const { return is_in_dex_cache_; }
   bool CanThrow() const OVERRIDE { return !IsInDexCache(); }

   static SideEffects SideEffectsForArchRuntimeCalls() {
     return SideEffects::CanTriggerGC();
   }

+  bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
+
   DECLARE_INSTRUCTION(LoadString);

  private:
+  static constexpr size_t kFlagIsInDexCache = kNumberOfExpressionPackedBits;
+  static constexpr size_t kNumberOfLoadStringPackedBits = kFlagIsInDexCache + 1;
+  static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+
   const uint32_t string_index_;
-  const bool is_in_dex_cache_;

   DISALLOW_COPY_AND_ASSIGN(HLoadString);
 };
@@ -5482,8 +5630,8 @@ class HStaticFieldSet : public HTemplateInstruction<2> {
                     field_idx,
                     declaring_class_def_index,
                     dex_file,
-                    dex_cache),
-        value_can_be_null_(true) {
+                    dex_cache) {
+    SetPackedFlag<kFlagValueCanBeNull>(true);
     SetRawInputAt(0, cls);
     SetRawInputAt(1, value);
   }
@@ -5494,14 +5642,18 @@
   bool IsVolatile() const { return field_info_.IsVolatile(); }

   HInstruction* GetValue() const { return InputAt(1); }
-  bool GetValueCanBeNull() const { return value_can_be_null_; }
-  void ClearValueCanBeNull() { value_can_be_null_ = false; }
+  bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
+  void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }

   DECLARE_INSTRUCTION(StaticFieldSet);

  private:
+  static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
+  static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
+  static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+
   const FieldInfo field_info_;
-  bool value_can_be_null_;

   DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
 };
@@ -5539,8 +5691,8 @@ class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
                               uint32_t field_index,
                               uint32_t dex_pc)
       : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
-        field_type_(field_type),
         field_index_(field_index) {
+    SetPackedField<FieldTypeField>(field_type);
     DCHECK_EQ(field_type, value->GetType());
     SetRawInputAt(0, obj);
     SetRawInputAt(1, value);
   }
@@ -5549,13 +5701,21 @@
   bool NeedsEnvironment() const OVERRIDE { return true; }
   bool CanThrow() const OVERRIDE { return true; }

-  Primitive::Type GetFieldType() const { return field_type_; }
+  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
   uint32_t GetFieldIndex() const { return field_index_; }

   DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);

  private:
-  const Primitive::Type field_type_;
+  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldFieldTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
+  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
+      kFieldFieldType + kFieldFieldTypeSize;
+  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
+
   const uint32_t field_index_;

   DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
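Where the savings in this commit actually come from: separate bool and enum members each occupy at least a byte plus alignment padding, while bits folded into an already-present 32-bit word cost nothing extra. The toy model below makes the effect visible; the member names are placeholders and real HInstruction layouts differ:

    #include <cstdint>
    #include <iostream>

    struct Unpacked {
      void* other_state;  // stands in for the rest of the instruction
      int32_t type;       // e.g. a Primitive::Type member
      bool flag_a;        // e.g. needs_type_check_
      bool flag_b;        // e.g. value_can_be_null_
      bool flag_c;        // e.g. static_type_of_array_is_object_array_
      int32_t dex_pc;     // a following 4-byte member forces padding
    };

    struct Packed {
      void* other_state;
      uint32_t packed_fields;  // type and all three flags share one word
      int32_t dex_pc;
    };

    int main() {
      // Commonly prints "24 vs 16" on LP64 ABIs; exact numbers are
      // implementation-defined.
      std::cout << sizeof(Unpacked) << " vs " << sizeof(Packed) << "\n";
    }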
@@ -5591,8 +5751,8 @@ class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
                             uint32_t field_index,
                             uint32_t dex_pc)
       : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
-        field_type_(field_type),
         field_index_(field_index) {
+    SetPackedField<FieldTypeField>(field_type);
     DCHECK_EQ(field_type, value->GetType());
     SetRawInputAt(0, value);
   }
@@ -5600,13 +5760,21 @@
   bool NeedsEnvironment() const OVERRIDE { return true; }
   bool CanThrow() const OVERRIDE { return true; }

-  Primitive::Type GetFieldType() const { return field_type_; }
+  Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
   uint32_t GetFieldIndex() const { return field_index_; }

   DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);

  private:
-  const Primitive::Type field_type_;
+  static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldFieldTypeSize =
+      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
+  static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
+      kFieldFieldType + kFieldFieldTypeSize;
+  static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
+
   const uint32_t field_index_;

   DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
@@ -5670,7 +5838,8 @@ enum class TypeCheckKind {
   kAbstractClassCheck,  // Can just walk the super class chain, starting one up.
   kInterfaceCheck,      // No optimization yet when checking against an interface.
   kArrayObjectCheck,    // Can just check if the array is not primitive.
-  kArrayCheck           // No optimization yet when checking against a generic array.
+  kArrayCheck,          // No optimization yet when checking against a generic array.
+  kLast = kArrayCheck
 };

 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
@@ -5683,9 +5852,9 @@ class HInstanceOf : public HExpression<2> {
               uint32_t dex_pc)
       : HExpression(Primitive::kPrimBoolean,
                     SideEffectsForArchRuntimeCalls(check_kind),
-                    dex_pc),
-        check_kind_(check_kind),
-        must_do_null_check_(true) {
+                    dex_pc) {
+    SetPackedField<TypeCheckKindField>(check_kind);
+    SetPackedFlag<kFlagMustDoNullCheck>(true);
     SetRawInputAt(0, object);
     SetRawInputAt(1, constant);
   }
@@ -5697,16 +5866,14 @@
   }

   bool NeedsEnvironment() const OVERRIDE {
-    return CanCallRuntime(check_kind_);
+    return CanCallRuntime(GetTypeCheckKind());
   }

-  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }
-
-  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }
-
   // Used only in code generation.
-  bool MustDoNullCheck() const { return must_do_null_check_; }
-  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
+  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
+  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
+  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
+  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

   static bool CanCallRuntime(TypeCheckKind check_kind) {
     // Mips currently does runtime calls for any other checks.
@@ -5720,8 +5887,13 @@
   DECLARE_INSTRUCTION(InstanceOf);

  private:
-  const TypeCheckKind check_kind_;
-  bool must_do_null_check_;
+  static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFieldTypeCheckKindSize =
+      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
+  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
+  static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
+  static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

   DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
 };
@@ -5730,28 +5902,35 @@ class HBoundType : public HExpression<1> {
  public:
   HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
       : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
-        upper_bound_(ReferenceTypeInfo::CreateInvalid()),
-        upper_can_be_null_(true),
-        can_be_null_(true) {
+        upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
+    SetPackedFlag<kFlagUpperCanBeNull>(true);
+    SetPackedFlag<kFlagCanBeNull>(true);
     DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
     SetRawInputAt(0, input);
   }

   // {Get,Set}Upper* should only be used in reference type propagation.
   const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
-  bool GetUpperCanBeNull() const { return upper_can_be_null_; }
+  bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
   void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);

   void SetCanBeNull(bool can_be_null) {
-    DCHECK(upper_can_be_null_ || !can_be_null);
-    can_be_null_ = can_be_null;
+    DCHECK(GetUpperCanBeNull() || !can_be_null);
+    SetPackedFlag<kFlagCanBeNull>(can_be_null);
   }

-  bool CanBeNull() const OVERRIDE { return can_be_null_; }
+  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }

   DECLARE_INSTRUCTION(BoundType);

  private:
+  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
+  // is false then CanBeNull() cannot be true).
+  static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
+  static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
+  static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
+  static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+
   // Encodes the most upper class that this instruction can have. In other words
   // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
   // It is used to bound the type in cases like:
   //   if (x instanceof ClassX) {
   //     // uper_bound_ will be ClassX
   //   }
   ReferenceTypeInfo upper_bound_;
-  // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
-  // is false then can_be_null_ cannot be true).
-  bool upper_can_be_null_;
-  bool can_be_null_;

   DISALLOW_COPY_AND_ASSIGN(HBoundType);
 };
@@ -5773,9 +5948,9 @@ class HCheckCast : public HTemplateInstruction<2> {
              HLoadClass* constant,
              TypeCheckKind check_kind,
              uint32_t dex_pc)
-      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc),
-        check_kind_(check_kind),
-        must_do_null_check_(true) {
+      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
+    SetPackedField<TypeCheckKindField>(check_kind);
+    SetPackedFlag<kFlagMustDoNullCheck>(true);
     SetRawInputAt(0, object);
     SetRawInputAt(1, constant);
   }
@@ -5793,17 +5968,21 @@
   bool CanThrow() const OVERRIDE { return true; }

-  bool MustDoNullCheck() const { return must_do_null_check_; }
-  void ClearMustDoNullCheck() { must_do_null_check_ = false; }
-  TypeCheckKind GetTypeCheckKind() const { return check_kind_; }
-
-  bool IsExactCheck() const { return check_kind_ == TypeCheckKind::kExactCheck; }
+  bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
+  void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
+  TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
+  bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }

   DECLARE_INSTRUCTION(CheckCast);

  private:
-  const TypeCheckKind check_kind_;
-  bool must_do_null_check_;
+  static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldTypeCheckKindSize =
+      MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
+  static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
+  static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
+  static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
+  using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;

   DISALLOW_COPY_AND_ASSIGN(HCheckCast);
 };
@@ -5812,30 +5991,40 @@ class HMemoryBarrier : public HTemplateInstruction<0> {
  public:
   explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
       : HTemplateInstruction(
-            SideEffects::AllWritesAndReads(), dex_pc),  // Assume write/read on all fields/arrays.
-        barrier_kind_(barrier_kind) {}
+            SideEffects::AllWritesAndReads(), dex_pc) {  // Assume write/read on all fields/arrays.
+    SetPackedField<BarrierKindField>(barrier_kind);
+  }

-  MemBarrierKind GetBarrierKind() { return barrier_kind_; }
+  MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }

   DECLARE_INSTRUCTION(MemoryBarrier);

  private:
-  const MemBarrierKind barrier_kind_;
+  static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldBarrierKindSize =
+      MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
+  static constexpr size_t kNumberOfMemoryBarrierPackedBits =
+      kFieldBarrierKind + kFieldBarrierKindSize;
+  static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;

   DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
 };

 class HMonitorOperation : public HTemplateInstruction<1> {
  public:
-  enum OperationKind {
+  enum class OperationKind {
     kEnter,
     kExit,
+    kLast = kExit
   };

   HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
     : HTemplateInstruction(
-          SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
-      kind_(kind) {
+          SideEffects::AllExceptGCDependency(),  // Assume write/read on all fields/arrays.
+          dex_pc) {
+    SetPackedField<OperationKindField>(kind);
     SetRawInputAt(0, object);
   }
@@ -5849,13 +6038,20 @@
     return IsEnter();
   }

-
-  bool IsEnter() const { return kind_ == kEnter; }
+  OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
+  bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }

   DECLARE_INSTRUCTION(MonitorOperation);

  private:
-  const OperationKind kind_;
+  static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldOperationKindSize =
+      MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
+  static constexpr size_t kNumberOfMonitorOperationPackedBits =
+      kFieldOperationKind + kFieldOperationKindSize;
+  static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
+                "Too many packed fields.");
+  using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;

  private:
   DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);