Diffstat (limited to 'compiler/optimizing/nodes.h')
-rw-r--r--   compiler/optimizing/nodes.h   369
1 file changed, 134 insertions, 235 deletions
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index dfa8276651..6a45149509 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -24,20 +24,22 @@
 #include "base/arena_bit_vector.h"
 #include "base/arena_containers.h"
 #include "base/arena_object.h"
+#include "base/array_ref.h"
+#include "base/iteration_range.h"
 #include "base/stl_util.h"
+#include "base/transform_array_ref.h"
 #include "dex_file.h"
 #include "entrypoints/quick/quick_entrypoints_enum.h"
 #include "handle.h"
 #include "handle_scope.h"
 #include "invoke_type.h"
+#include "intrinsics_enum.h"
 #include "locations.h"
 #include "method_reference.h"
 #include "mirror/class.h"
 #include "offsets.h"
 #include "primitive.h"
-#include "utils/array_ref.h"
 #include "utils/intrusive_forward_list.h"
-#include "utils/transform_array_ref.h"
 
 namespace art {
 
@@ -109,6 +111,9 @@ enum IfCondition {
   kCondBE,  // <=
   kCondA,   // >
   kCondAE,  // >=
+  // First and last aliases.
+  kCondFirst = kCondEQ,
+  kCondLast = kCondAE,
 };
 
 enum GraphAnalysisResult {
@@ -171,7 +176,7 @@ class ReferenceTypeInfo : ValueObject {
 
   static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
 
-  static ReferenceTypeInfo Create(TypeHandle type_handle) SHARED_REQUIRES(Locks::mutator_lock_) {
+  static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
     return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
   }
 
@@ -191,49 +196,49 @@ class ReferenceTypeInfo : ValueObject {
 
   bool IsExact() const { return is_exact_; }
 
-  bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     return GetTypeHandle()->IsObjectClass();
   }
 
-  bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     return GetTypeHandle()->IsStringClass();
   }
 
-  bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
   }
 
-  bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     return GetTypeHandle()->IsInterface();
   }
 
-  bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(IsValid());
     return GetTypeHandle()->IsArrayClass();
   }
 
-  bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     return GetTypeHandle()->IsPrimitiveArray();
   }
 
-  bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
   }
 
-  bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     if (!IsExact()) return false;
     if (!IsArrayClass()) return false;
     return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
   }
 
-  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     if (!IsExact()) return false;
     if (!IsArrayClass()) return false;
@@ -244,13 +249,13 @@ class ReferenceTypeInfo : ValueObject {
 
   Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
 
-  bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     DCHECK(rti.IsValid());
     return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
   }
 
-  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(IsValid());
     DCHECK(rti.IsValid());
     return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
@@ -260,7 +265,7 @@ class ReferenceTypeInfo : ValueObject {
   // Returns true if the type information provide the same amount of details.
   // Note that it does not mean that the instructions have the same actual type
   // (because the type can be the result of a merge).
-  bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
     if (!IsValid() && !rti.IsValid()) {
       // Invalid types are equal.
       return true;
@@ -332,7 +337,7 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
   }
 
   // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
-  void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);
+  void InitializeInexactObjectRTI(VariableSizedHandleScope* handles);
 
   ArenaAllocator* GetArena() const { return arena_; }
   const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
@@ -456,10 +461,23 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
     return reverse_post_order_;
   }
 
+  ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() {
+    DCHECK(GetReversePostOrder()[0] == entry_block_);
+    return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
+  }
+
+  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
+    return ReverseRange(GetReversePostOrder());
+  }
+
   const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
     return linear_order_;
   }
 
+  IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
+    return ReverseRange(GetLinearOrder());
+  }
+
   bool HasBoundsChecks() const {
     return has_bounds_checks_;
   }
@@ -575,7 +593,8 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
   // List of blocks to perform a reverse post order tree traversal.
   ArenaVector<HBasicBlock*> reverse_post_order_;
 
-  // List of blocks to perform a linear order tree traversal.
+  // List of blocks to perform a linear order tree traversal. Unlike the reverse
+  // post order, this order is not incrementally kept up-to-date.
   ArenaVector<HBasicBlock*> linear_order_;
 
   HBasicBlock* entry_block_;
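[Annotation — illustration, not part of the change] The new GetPostOrder() and GetLinearPostOrder() accessors return IterationRange adapters, and GetReversePostOrder() already returns a container, so traversal code can use plain range-based for loops; the hand-written Done()/Current()/Advance() iterator classes deleted at the end of this diff become redundant. A minimal sketch of caller code under that assumption:

    // Hypothetical pass code iterating the graph in both directions.
    void VisitGraph(HGraph* graph) {
      for (HBasicBlock* block : graph->GetReversePostOrder()) {
        // ... visit in reverse post order (entry block first) ...
      }
      for (HBasicBlock* block : graph->GetPostOrder()) {
        // ... visit in post order (entry block last) ...
      }
      // Skipping the entry block is now a one-liner as well:
      for (HBasicBlock* block : graph->GetReversePostOrderSkipEntryBlock()) {
        // ... visit every block except the entry block ...
      }
    }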
@@ -827,7 +846,7 @@ static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
 
 class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
  public:
-  HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
+  explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
       : graph_(graph),
         predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
         successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
@@ -1311,7 +1330,8 @@ class HLoopInformationOutwardIterator : public ValueObject {
 #else
 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)  \
   M(MipsComputeBaseMethodAddress, Instruction) \
-  M(MipsDexCacheArraysBase, Instruction)
+  M(MipsDexCacheArraysBase, Instruction)       \
+  M(MipsPackedSwitch, Instruction)
 #endif
 
 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
@@ -1925,6 +1945,22 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
     return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
   }
 
+  bool IsRemovable() const {
+    return
+        !HasSideEffects() &&
+        !CanThrow() &&
+        !IsSuspendCheck() &&
+        !IsControlFlow() &&
+        !IsNativeDebugInfo() &&
+        !IsParameterValue() &&
+        // If we added an explicit barrier then we should keep it.
+        !IsMemoryBarrier();
+  }
+
+  bool IsDeadAndRemovable() const {
+    return IsRemovable() && !HasUses();
+  }
+
   // Does this instruction strictly dominate `other_instruction`?
   // Returns false if this instruction and `other_instruction` are the same.
   // Aborts if this instruction and `other_instruction` are both phis.
@@ -2074,10 +2110,10 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
   // to the current method. Such instructions are:
   // (1): Instructions that require an environment, as calling the runtime requires
   //      to walk the stack and have the current method stored at a specific stack address.
-  // (2): Object literals like classes and strings, that are loaded from the dex cache
-  //      fields of the current method.
+  // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
+  //      to access the dex cache.
   bool NeedsCurrentMethod() const {
-    return NeedsEnvironment() || IsLoadClass() || IsLoadString();
+    return NeedsEnvironment() || IsCurrentMethod();
   }
 
   // Returns whether the code generation of the instruction will require to have access
@@ -3679,17 +3715,6 @@ class HNewInstance FINAL : public HExpression<2> {
   DISALLOW_COPY_AND_ASSIGN(HNewInstance);
 };
 
-enum class Intrinsics {
-#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
-  k ## Name,
-#include "intrinsics_list.h"
-  kNone,
-  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
-#undef INTRINSICS_LIST
-#undef OPTIMIZING_INTRINSICS
-};
-std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
-
 enum IntrinsicNeedsEnvironmentOrCache {
   kNoEnvironmentOrCache,    // Intrinsic does not require an environment or dex cache.
   kNeedsEnvironmentOrCache  // Intrinsic requires an environment or requires a dex cache.
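[Annotation — illustration, not part of the change] IsRemovable()/IsDeadAndRemovable() centralize the eligibility test that dead-code elimination previously had to spell out as a chain of negated predicates. A sketch of how a DCE-style pass might use the new helpers (hypothetical caller; the real pass lives in dead_code_elimination.cc):

    // Iterate backwards so that removing a dead instruction can expose
    // its inputs as newly dead on the same sweep.
    for (HBackwardInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->IsDeadAndRemovable()) {
        block->RemoveInstruction(instruction);
      }
    }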
@@ -3731,8 +3756,8 @@ class HInvoke : public HInstruction {
   uint32_t GetDexMethodIndex() const { return dex_method_index_; }
   const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }
 
-  InvokeType GetOriginalInvokeType() const {
-    return GetPackedField<OriginalInvokeTypeField>();
+  InvokeType GetInvokeType() const {
+    return GetPackedField<InvokeTypeField>();
   }
 
   Intrinsics GetIntrinsic() const {
@@ -3766,21 +3791,22 @@ class HInvoke : public HInstruction {
 
   bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
 
+  ArtMethod* GetResolvedMethod() const { return resolved_method_; }
+
   DECLARE_ABSTRACT_INSTRUCTION(Invoke);
 
  protected:
-  static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
-  static constexpr size_t kFieldOriginalInvokeTypeSize =
+  static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
+  static constexpr size_t kFieldInvokeTypeSize =
       MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
   static constexpr size_t kFieldReturnType =
-      kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
+      kFieldInvokeType + kFieldInvokeTypeSize;
   static constexpr size_t kFieldReturnTypeSize =
       MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
   static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
   static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
   static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
-  using OriginalInvokeTypeField =
-      BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
+  using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
   using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;
 
   HInvoke(ArenaAllocator* arena,
@@ -3789,23 +3815,26 @@ class HInvoke : public HInstruction {
           Primitive::Type return_type,
           uint32_t dex_pc,
           uint32_t dex_method_index,
-          InvokeType original_invoke_type)
+          ArtMethod* resolved_method,
+          InvokeType invoke_type)
     : HInstruction(
           SideEffects::AllExceptGCDependency(), dex_pc),  // Assume write/read on all fields/arrays.
       number_of_arguments_(number_of_arguments),
+      resolved_method_(resolved_method),
       inputs_(number_of_arguments + number_of_other_inputs,
               arena->Adapter(kArenaAllocInvokeInputs)),
       dex_method_index_(dex_method_index),
       intrinsic_(Intrinsics::kNone),
       intrinsic_optimizations_(0) {
     SetPackedField<ReturnTypeField>(return_type);
-    SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
+    SetPackedField<InvokeTypeField>(invoke_type);
     SetPackedFlag<kFlagCanThrow>(true);
   }
 
   void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
 
   uint32_t number_of_arguments_;
+  ArtMethod* const resolved_method_;
   ArenaVector<HUserRecord<HInstruction*>> inputs_;
   const uint32_t dex_method_index_;
   Intrinsics intrinsic_;
@@ -3831,6 +3860,7 @@ class HInvokeUnresolved FINAL : public HInvoke {
                 return_type,
                 dex_pc,
                 dex_method_index,
+                nullptr,
                 invoke_type) {
   }
 
@@ -3924,10 +3954,10 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
                         Primitive::Type return_type,
                         uint32_t dex_pc,
                         uint32_t method_index,
-                        MethodReference target_method,
+                        ArtMethod* resolved_method,
                         DispatchInfo dispatch_info,
-                        InvokeType original_invoke_type,
-                        InvokeType optimized_invoke_type,
+                        InvokeType invoke_type,
+                        MethodReference target_method,
                         ClinitCheckRequirement clinit_check_requirement)
       : HInvoke(arena,
                 number_of_arguments,
@@ -3939,10 +3969,10 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
                 return_type,
                 dex_pc,
                 method_index,
-                original_invoke_type),
+                resolved_method,
+                invoke_type),
         target_method_(target_method),
         dispatch_info_(dispatch_info) {
-    SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
     SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
   }
 
@@ -4006,14 +4036,6 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
   uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
   bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
 
-  InvokeType GetOptimizedInvokeType() const {
-    return GetPackedField<OptimizedInvokeTypeField>();
-  }
-
-  void SetOptimizedInvokeType(InvokeType invoke_type) {
-    SetPackedField<OptimizedInvokeTypeField>(invoke_type);
-  }
-
   MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
   CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
   bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
@@ -4035,12 +4057,10 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
     }
   }
   bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
-  MethodReference GetTargetMethod() const { return target_method_; }
-  void SetTargetMethod(MethodReference method) { target_method_ = method; }
 
-  int32_t GetStringInitOffset() const {
+  QuickEntrypointEnum GetStringInitEntryPoint() const {
     DCHECK(IsStringInit());
-    return dispatch_info_.method_load_data;
+    return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
   }
 
   uint64_t GetMethodAddress() const {
@@ -4064,7 +4084,11 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
 
   // Is this instruction a call to a static method?
   bool IsStatic() const {
-    return GetOriginalInvokeType() == kStatic;
+    return GetInvokeType() == kStatic;
+  }
+
+  MethodReference GetTargetMethod() const {
+    return target_method_;
   }
 
   // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
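[Annotation — illustration, not part of the change] HInvoke now carries the resolved ArtMethod* and a single invoke type, so the GetOriginalInvokeType()/GetOptimizedInvokeType() pair collapses into GetInvokeType(), and GetStringInitEntryPoint() returns a typed QuickEntrypointEnum instead of a raw int32_t offset. Hypothetical caller code under the new API:

    void Inspect(HInvokeStaticOrDirect* invoke) {
      if (invoke->GetInvokeType() == kStatic) {
        // Cached at graph-build time; no re-resolution (and no mutator lock) needed.
        ArtMethod* method = invoke->GetResolvedMethod();
        // ... inspect `method` ...
      }
      if (invoke->IsStringInit()) {
        QuickEntrypointEnum entrypoint = invoke->GetStringInitEntryPoint();
        // ... select the StringFactory runtime entrypoint from `entrypoint` ...
      }
    }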
@@ -4106,26 +4130,18 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
   void RemoveInputAt(size_t index);
 
  private:
-  static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
-  static constexpr size_t kFieldOptimizedInvokeTypeSize =
-      MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
-  static constexpr size_t kFieldClinitCheckRequirement =
-      kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
+  static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
   static constexpr size_t kFieldClinitCheckRequirementSize =
       MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
   static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
       kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
   static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
                 "Too many packed fields.");
-  using OptimizedInvokeTypeField =
-      BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
   using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
                                                kFieldClinitCheckRequirement,
                                                kFieldClinitCheckRequirementSize>;
 
-  // The target method may refer to different dex file or method index than the original
-  // invoke. This happens for sharpened calls and for calls where a method was redeclared
-  // in derived class to increase visibility.
+  // Cached values of the resolved method, to avoid needing the mutator lock.
   MethodReference target_method_;
   DispatchInfo dispatch_info_;
 
@@ -4141,8 +4157,16 @@ class HInvokeVirtual FINAL : public HInvoke {
                  Primitive::Type return_type,
                  uint32_t dex_pc,
                  uint32_t dex_method_index,
+                 ArtMethod* resolved_method,
                  uint32_t vtable_index)
-      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
+      : HInvoke(arena,
+                number_of_arguments,
+                0u,
+                return_type,
+                dex_pc,
+                dex_method_index,
+                resolved_method,
+                kVirtual),
         vtable_index_(vtable_index) {}
 
   bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
@@ -4155,6 +4179,7 @@ class HInvokeVirtual FINAL : public HInvoke {
   DECLARE_INSTRUCTION(InvokeVirtual);
 
  private:
+  // Cached value of the resolved method, to avoid needing the mutator lock.
   const uint32_t vtable_index_;
 
   DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
@@ -4167,8 +4192,16 @@ class HInvokeInterface FINAL : public HInvoke {
                    Primitive::Type return_type,
                    uint32_t dex_pc,
                    uint32_t dex_method_index,
+                   ArtMethod* resolved_method,
                    uint32_t imt_index)
-      : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
+      : HInvoke(arena,
+                number_of_arguments,
+                0u,
+                return_type,
+                dex_pc,
+                dex_method_index,
+                resolved_method,
+                kInterface),
         imt_index_(imt_index) {}
 
   bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
@@ -4182,6 +4215,7 @@ class HInvokeInterface FINAL : public HInvoke {
   DECLARE_INSTRUCTION(InvokeInterface);
 
  private:
+  // Cached value of the resolved method, to avoid needing the mutator lock.
   const uint32_t imt_index_;
 
   DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
@@ -4363,7 +4397,7 @@ class HDiv FINAL : public HBinaryOperation {
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc)
-      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
+      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
 
   template <typename T>
   T ComputeIntegral(T x, T y) const {
@@ -4398,11 +4432,6 @@ class HDiv FINAL : public HBinaryOperation {
         ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
   }
 
-  static SideEffects SideEffectsForArchRuntimeCalls() {
-    // The generated code can use a runtime call.
-    return SideEffects::CanTriggerGC();
-  }
-
   DECLARE_INSTRUCTION(Div);
 
  private:
@@ -4415,7 +4444,7 @@ class HRem FINAL : public HBinaryOperation {
        HInstruction* left,
        HInstruction* right,
        uint32_t dex_pc)
-      : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
+      : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
 
   template <typename T>
   T ComputeIntegral(T x, T y) const {
@@ -4450,10 +4479,6 @@ class HRem FINAL : public HBinaryOperation {
         ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
   }
 
-  static SideEffects SideEffectsForArchRuntimeCalls() {
-    return SideEffects::CanTriggerGC();
-  }
-
   DECLARE_INSTRUCTION(Rem);
 
  private:
@@ -4906,9 +4931,7 @@ class HTypeConversion FINAL : public HExpression<1> {
  public:
   // Instantiate a type conversion of `input` to `result_type`.
   HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
-      : HExpression(result_type,
-                    SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
-                    dex_pc) {
+      : HExpression(result_type, SideEffects::None(), dex_pc) {
     SetRawInputAt(0, input);
     // Invariant: We should never generate a conversion to a Boolean value.
     DCHECK_NE(Primitive::kPrimBoolean, result_type);
@@ -4927,18 +4950,6 @@ class HTypeConversion FINAL : public HExpression<1> {
   // containing the result. If the input cannot be converted, return nullptr.
   HConstant* TryStaticEvaluation() const;
 
-  static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
-                                                    Primitive::Type result_type) {
-    // Some architectures may not require the 'GC' side effects, but at this point
-    // in the compilation process we do not know what architecture we will
-    // generate code for, so we must be conservative.
-    if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
-        || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
-      return SideEffects::CanTriggerGC();
-    }
-    return SideEffects::None();
-  }
-
   DECLARE_INSTRUCTION(TypeConversion);
 
  private:
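[Annotation — illustration, not part of the change] Dropping SideEffectsForArchRuntimeCalls() means HDiv, HRem, and HTypeConversion are now constructed with SideEffects::None(), even though some architectures still implement them via runtime calls; the CanTriggerGC() bit was a conservative over-approximation. As this reviewer reads it, the practical effect is that optimizations keyed off side effects may treat these nodes as pure:

    // Two structurally identical divisions with no intervening side effects
    // become candidates for global value numbering (schematic HIR, not real syntax):
    //   d1 = HDiv(a, b)
    //   ...
    //   d2 = HDiv(a, b)   // GVN may now replace uses of d2 with d1.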
@@ -5020,9 +5031,7 @@ class HInstanceFieldGet FINAL : public HExpression<1> {
                     const DexFile& dex_file,
                     Handle<mirror::DexCache> dex_cache,
                     uint32_t dex_pc)
-      : HExpression(field_type,
-                    SideEffects::FieldReadOfType(field_type, is_volatile),
-                    dex_pc),
+      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
         field_info_(field_offset,
                     field_type,
                     is_volatile,
@@ -5073,8 +5082,7 @@ class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
                     const DexFile& dex_file,
                     Handle<mirror::DexCache> dex_cache,
                     uint32_t dex_pc)
-      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
-                             dex_pc),
+      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
         field_info_(field_offset,
                     field_type,
                     is_volatile,
@@ -5441,7 +5449,8 @@ class HLoadClass FINAL : public HInstruction {
              bool is_referrers_class,
              uint32_t dex_pc,
              bool needs_access_check,
-             bool is_in_dex_cache)
+             bool is_in_dex_cache,
+             bool is_in_boot_image)
       : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc),
         special_input_(HUserRecord<HInstruction*>(current_method)),
         type_index_(type_index),
@@ -5455,6 +5464,7 @@ class HLoadClass FINAL : public HInstruction {
         is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
     SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
     SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
+    SetPackedFlag<kFlagIsInBootImage>(is_in_boot_image);
     SetPackedFlag<kFlagGenerateClInitCheck>(false);
   }
 
@@ -5545,6 +5555,7 @@ class HLoadClass FINAL : public HInstruction {
   bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
   bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
   bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
+  bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
   bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
 
   void MarkInDexCache() {
@@ -5554,6 +5565,10 @@ class HLoadClass FINAL : public HInstruction {
     SetSideEffects(SideEffects::None());
   }
 
+  void MarkInBootImage() {
+    SetPackedFlag<kFlagIsInBootImage>(true);
+  }
+
   void AddSpecialInput(HInstruction* special_input);
 
   using HInstruction::GetInputRecords;  // Keep the const version visible.
@@ -5571,9 +5586,10 @@ class HLoadClass FINAL : public HInstruction {
  private:
   static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
   static constexpr size_t kFlagIsInDexCache = kFlagNeedsAccessCheck + 1;
+  static constexpr size_t kFlagIsInBootImage = kFlagIsInDexCache + 1;
   // Whether this instruction must generate the initialization check.
   // Used for code generation.
-  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
+  static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
   static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1;
   static constexpr size_t kFieldLoadKindSize =
       MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
@@ -5658,10 +5674,9 @@ class HLoadString FINAL : public HInstruction {
     // Used for strings outside the boot image referenced by JIT-compiled code.
     kDexCacheAddress,
 
-    // Load from resolved strings array in the dex cache using a PC-relative load.
-    // Used for strings outside boot image when we know that we can access
-    // the dex cache arrays using a PC-relative load.
-    kDexCachePcRelative,
+    // Load from an entry in the .bss section using a PC-relative load.
+    // Used for strings outside boot image when .bss is accessible with a PC-relative load.
+    kBssEntry,
 
     // Load from resolved strings array accessed through the class loaded from
     // the compiled method's own ArtMethod*. This is the default access type when
@@ -5680,7 +5695,7 @@ class HLoadString FINAL : public HInstruction {
         string_index_(string_index) {
     SetPackedFlag<kFlagIsInDexCache>(false);
     SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
-    load_data_.ref.dex_file = &dex_file;
+    load_data_.dex_file_ = &dex_file;
   }
 
   void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
@@ -5693,20 +5708,11 @@ class HLoadString FINAL : public HInstruction {
                                     const DexFile& dex_file,
                                     uint32_t string_index) {
     DCHECK(HasStringReference(load_kind));
-    load_data_.ref.dex_file = &dex_file;
+    load_data_.dex_file_ = &dex_file;
     string_index_ = string_index;
     SetLoadKindInternal(load_kind);
   }
 
-  void SetLoadKindWithDexCacheReference(LoadKind load_kind,
-                                        const DexFile& dex_file,
-                                        uint32_t element_index) {
-    DCHECK(HasDexCacheReference(load_kind));
-    load_data_.ref.dex_file = &dex_file;
-    load_data_.ref.dex_cache_element_index = element_index;
-    SetLoadKindInternal(load_kind);
-  }
-
   LoadKind GetLoadKind() const {
     return GetPackedField<LoadKindField>();
   }
@@ -5718,8 +5724,6 @@ class HLoadString FINAL : public HInstruction {
     return string_index_;
   }
 
-  uint32_t GetDexCacheElementOffset() const;
-
   uint64_t GetAddress() const {
     DCHECK(HasAddress(GetLoadKind()));
     return load_data_.address;
@@ -5789,6 +5793,7 @@ class HLoadString FINAL : public HInstruction {
   static bool HasStringReference(LoadKind load_kind) {
     return load_kind == LoadKind::kBootImageLinkTimeAddress ||
         load_kind == LoadKind::kBootImageLinkTimePcRelative ||
+        load_kind == LoadKind::kBssEntry ||
        load_kind == LoadKind::kDexCacheViaMethod;
   }
 
@@ -5796,10 +5801,6 @@ class HLoadString FINAL : public HInstruction {
     return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
   }
 
-  static bool HasDexCacheReference(LoadKind load_kind) {
-    return load_kind == LoadKind::kDexCachePcRelative;
-  }
-
   void SetLoadKindInternal(LoadKind load_kind);
 
   // The special input is the HCurrentMethod for kDexCacheViaMethod.
@@ -5812,10 +5813,7 @@ class HLoadString FINAL : public HInstruction {
   uint32_t string_index_;
 
   union {
-    struct {
-      const DexFile* dex_file;            // For string reference and dex cache reference.
-      uint32_t dex_cache_element_index;   // Only for dex cache reference.
-    } ref;
+    const DexFile* dex_file_;  // For string reference.
     uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
   } load_data_;
 
@@ -5825,15 +5823,8 @@ std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
 
 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
 inline const DexFile& HLoadString::GetDexFile() const {
-  DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
-      << GetLoadKind();
-  return *load_data_.ref.dex_file;
-}
-
-// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
-inline uint32_t HLoadString::GetDexCacheElementOffset() const {
-  DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
-  return load_data_.ref.dex_cache_element_index;
+  DCHECK(HasStringReference(GetLoadKind())) << GetLoadKind();
+  return *load_data_.dex_file_;
 }
 
// Note: defined outside class to see operator<<(., HLoadString::LoadKind).
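[Annotation — illustration, not part of the change] kBssEntry replaces kDexCachePcRelative: instead of indexing the dex cache's resolved-strings array, compiled code loads the String reference PC-relatively from a .bss slot that the runtime fills in on first resolution, which is why load_data_ no longer needs a dex_cache_element_index. A schematic of the intended sequence (AArch64-flavored pseudo-assembly, not actual codegen output):

    // The linker patches both instructions to address the .bss string entry.
    //   adrp x0, <.bss string entry page>
    //   ldr  w0, [x0, <.bss string entry page offset>]
    //   cbz  w0, <slow path>   // Empty slot: resolve the String, then store it back.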
@@ -5841,7 +5832,7 @@ inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
   // The special input is used for PC-relative loads on some architectures,
   // including literal pool loads, which are PC-relative too.
   DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
-         GetLoadKind() == LoadKind::kDexCachePcRelative ||
+         GetLoadKind() == LoadKind::kBssEntry ||
          GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
          GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
   // HLoadString::GetInputRecords() returns an empty array at this point,
@@ -5895,9 +5886,7 @@ class HStaticFieldGet FINAL : public HExpression<1> {
                   const DexFile& dex_file,
                   Handle<mirror::DexCache> dex_cache,
                   uint32_t dex_pc)
-      : HExpression(field_type,
-                    SideEffects::FieldReadOfType(field_type, is_volatile),
-                    dex_pc),
+      : HExpression(field_type, SideEffects::FieldReadOfType(field_type, is_volatile), dex_pc),
         field_info_(field_offset,
                     field_type,
                     is_volatile,
@@ -5945,8 +5934,7 @@ class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
                   const DexFile& dex_file,
                   Handle<mirror::DexCache> dex_cache,
                   uint32_t dex_pc)
-      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
-                             dex_pc),
+      : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile), dex_pc),
         field_info_(field_offset,
                     field_type,
                     is_volatile,
@@ -6223,7 +6211,7 @@ class HInstanceOf FINAL : public HExpression<2> {
 
 class HBoundType FINAL : public HExpression<1> {
  public:
-  HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
+  explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
       : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
         upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
     SetPackedFlag<kFlagUpperCanBeNull>(true);
@@ -6644,95 +6632,6 @@ class HGraphDelegateVisitor : public HGraphVisitor {
   DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
 };
 
-class HInsertionOrderIterator : public ValueObject {
- public:
-  explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}
-
-  bool Done() const { return index_ == graph_.GetBlocks().size(); }
-  HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
-  void Advance() { ++index_; }
-
- private:
-  const HGraph& graph_;
-  size_t index_;
-
-  DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
-};
-
-class HReversePostOrderIterator : public ValueObject {
- public:
-  explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
-    // Check that reverse post order of the graph has been built.
-    DCHECK(!graph.GetReversePostOrder().empty());
-  }
-
-  bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
-  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
-  void Advance() { ++index_; }
-
- private:
-  const HGraph& graph_;
-  size_t index_;
-
-  DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
-};
-
-class HPostOrderIterator : public ValueObject {
- public:
-  explicit HPostOrderIterator(const HGraph& graph)
-      : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
-    // Check that reverse post order of the graph has been built.
-    DCHECK(!graph.GetReversePostOrder().empty());
-  }
-
-  bool Done() const { return index_ == 0; }
-  HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
-  void Advance() { --index_; }
-
- private:
-  const HGraph& graph_;
-  size_t index_;
-
-  DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
-};
-
-class HLinearPostOrderIterator : public ValueObject {
- public:
-  explicit HLinearPostOrderIterator(const HGraph& graph)
-      : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
-
-  bool Done() const { return index_ == 0; }
-
-  HBasicBlock* Current() const { return order_[index_ - 1u]; }
-
-  void Advance() {
-    --index_;
-    DCHECK_GE(index_, 0U);
-  }
-
- private:
-  const ArenaVector<HBasicBlock*>& order_;
-  size_t index_;
-
-  DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
-};
-
-class HLinearOrderIterator : public ValueObject {
- public:
-  explicit HLinearOrderIterator(const HGraph& graph)
-      : order_(graph.GetLinearOrder()), index_(0) {}
-
-  bool Done() const { return index_ == order_.size(); }
-  HBasicBlock* Current() const { return order_[index_]; }
-  void Advance() { ++index_; }
-
- private:
-  const ArenaVector<HBasicBlock*>& order_;
-  size_t index_;
-
-  DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
-};
-
 // Iterator over the blocks that are part of the loop. Includes blocks part
 // of an inner loop. The order in which the blocks are iterated is on their
 // block id.
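[Annotation — illustration, not part of the change] Every class deleted above follows the same Done()/Current()/Advance() protocol, so call sites migrate mechanically to the range-based forms on HGraph. A before/after sketch of a hypothetical call site:

    // Before (using the removed HReversePostOrderIterator):
    for (HReversePostOrderIterator it(*graph); !it.Done(); it.Advance()) {
      HBasicBlock* block = it.Current();
      // ... process `block` ...
    }

    // After:
    for (HBasicBlock* block : graph->GetReversePostOrder()) {
      // ... process `block` ...
    }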