Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator_arm.cc          |  47
-rw-r--r--  compiler/optimizing/inliner.cc                      | 118
-rw-r--r--  compiler/optimizing/inliner.h                       |   5
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc          |  12
-rw-r--r--  compiler/optimizing/reference_type_propagation.cc   |  17
-rw-r--r--  compiler/optimizing/reference_type_propagation.h    |   7
-rw-r--r--  compiler/optimizing/stack_map_stream.cc             |  48
-rw-r--r--  compiler/optimizing/stack_map_stream.h              |   1
-rw-r--r--  compiler/optimizing/stack_map_test.cc               | 318
9 files changed, 345 insertions, 228 deletions
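
The code_generator_arm.cc hunks below drop the two core-register temporaries that were only used to materialize k2Pow32EncodingForDouble, loading the constant directly into an FPU register with LoadDImmediate and folding the multiply and add into a single vmlad. A rough C++ model of the value the new instruction sequence computes — plain arithmetic only, with 2^32 written out instead of the encoded constant; this is not ART code:

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    // Long-to-double, split the way the ARM sequence splits it: convert the
    // signed high word and the unsigned low word separately, then combine
    // them as high * 2^32 + low (vmlad performs this multiply-accumulate).
    static double LongToDouble(int64_t value) {
      int32_t high = static_cast<int32_t>(value >> 32);   // vcvtdi on the high word
      uint32_t low = static_cast<uint32_t>(value);        // vcvtdu on the low word
      const double k2Pow32 = 4294967296.0;                // constant loaded via LoadDImmediate
      return static_cast<double>(high) * k2Pow32 + static_cast<double>(low);
    }

    int main() {
      assert(LongToDouble(-1) == -1.0);
      assert(LongToDouble(INT64_C(1) << 40) == std::ldexp(1.0, 40));
      return 0;
    }
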
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index 04952beb41..f7731069fb 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -1638,8 +1638,7 @@ void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) { // Processing a Dex `long-to-double' instruction. locations->SetInAt(0, Location::RequiresRegister()); locations->SetOut(Location::RequiresFpuRegister()); - locations->AddTemp(Location::RequiresRegister()); - locations->AddTemp(Location::RequiresRegister()); + locations->AddTemp(Location::RequiresFpuRegister()); locations->AddTemp(Location::RequiresFpuRegister()); break; @@ -1857,29 +1856,21 @@ void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversio Register high = in.AsRegisterPairHigh<Register>(); SRegister out_s = out.AsFpuRegisterPairLow<SRegister>(); DRegister out_d = FromLowSToD(out_s); - Register constant_low = locations->GetTemp(0).AsRegister<Register>(); - Register constant_high = locations->GetTemp(1).AsRegister<Register>(); - SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>(); + SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>(); DRegister temp_d = FromLowSToD(temp_s); - - // out_d = int-to-double(high) - __ vmovsr(out_s, high); - __ vcvtdi(out_d, out_s); - // Using vmovd to load the `k2Pow32EncodingForDouble` constant - // as an immediate value into `temp_d` does not work, as - // this instruction only transfers 8 significant bits of its - // immediate operand. Instead, use two 32-bit core - // registers to load `k2Pow32EncodingForDouble` into `temp_d`. - __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble)); - __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble)); - __ vmovdrr(temp_d, constant_low, constant_high); - // out_d = out_d * 2^32 - __ vmuld(out_d, out_d, temp_d); - // temp_d = unsigned-to-double(low) - __ vmovsr(temp_s, low); - __ vcvtdu(temp_d, temp_s); - // out_d = out_d + temp_d - __ vaddd(out_d, out_d, temp_d); + SRegister constant_s = locations->GetTemp(1).AsFpuRegisterPairLow<SRegister>(); + DRegister constant_d = FromLowSToD(constant_s); + + // temp_d = int-to-double(high) + __ vmovsr(temp_s, high); + __ vcvtdi(temp_d, temp_s); + // constant_d = k2Pow32EncodingForDouble + __ LoadDImmediate(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble)); + // out_d = unsigned-to-double(low) + __ vmovsr(out_s, low); + __ vcvtdu(out_d, out_s); + // out_d += temp_d * constant_d + __ vmlad(out_d, temp_d, constant_d); break; } @@ -2910,22 +2901,22 @@ void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) { void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) { // TODO (ported from quick): revisit Arm barrier kinds - DmbOptions flavour = DmbOptions::ISH; // quiet c++ warnings + DmbOptions flavor = DmbOptions::ISH; // quiet c++ warnings switch (kind) { case MemBarrierKind::kAnyStore: case MemBarrierKind::kLoadAny: case MemBarrierKind::kAnyAny: { - flavour = DmbOptions::ISH; + flavor = DmbOptions::ISH; break; } case MemBarrierKind::kStoreStore: { - flavour = DmbOptions::ISHST; + flavor = DmbOptions::ISHST; break; } default: LOG(FATAL) << "Unexpected memory barrier " << kind; } - __ dmb(flavour); + __ dmb(flavor); } void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr, diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 83f8d83bc4..07d0dd6b49 100644 --- 
a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -27,6 +27,7 @@ #include "mirror/class_loader.h" #include "mirror/dex_cache.h" #include "nodes.h" +#include "reference_type_propagation.h" #include "register_allocator.h" #include "ssa_phi_elimination.h" #include "scoped_thread_state_change.h" @@ -57,7 +58,7 @@ void HInliner::Run() { next_block = (i == blocks.Size() - 1) ? nullptr : blocks.Get(i + 1); for (HInstruction* instruction = block->GetFirstInstruction(); instruction != nullptr;) { HInstruction* next = instruction->GetNext(); - HInvokeStaticOrDirect* call = instruction->AsInvokeStaticOrDirect(); + HInvoke* call = instruction->AsInvoke(); // As long as the call is not intrinsified, it is worth trying to inline. if (call != nullptr && call->GetIntrinsic() == Intrinsics::kNone) { // We use the original invoke type to ensure the resolution of the called method @@ -69,6 +70,13 @@ void HInliner::Run() { bool should_inline = callee_name.find("$inline$") != std::string::npos; CHECK(!should_inline) << "Could not inline " << callee_name; } + } else { + if (kIsDebugBuild) { + std::string callee_name = + PrettyMethod(call->GetDexMethodIndex(), *outer_compilation_unit_.GetDexFile()); + bool must_not_inline = callee_name.find("$noinline$") != std::string::npos; + CHECK(!must_not_inline) << "Should not have inlined " << callee_name; + } } } instruction = next; @@ -76,6 +84,82 @@ void HInliner::Run() { } } +static bool IsMethodOrDeclaringClassFinal(ArtMethod* method) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + return method->IsFinal() || method->GetDeclaringClass()->IsFinal(); +} + +/** + * Given the `resolved_method` looked up in the dex cache, try to find + * the actual runtime target of an interface or virtual call. + * Return nullptr if the runtime target cannot be proven. + */ +static ArtMethod* FindVirtualOrInterfaceTarget(HInvoke* invoke, ArtMethod* resolved_method) + SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { + if (IsMethodOrDeclaringClassFinal(resolved_method)) { + // No need to lookup further, the resolved method will be the target. + return resolved_method; + } + + HInstruction* receiver = invoke->InputAt(0); + if (receiver->IsNullCheck()) { + // Due to multiple levels of inlining within the same pass, it might be that + // null check does not have the reference type of the actual receiver. + receiver = receiver->InputAt(0); + } + ReferenceTypeInfo info = receiver->GetReferenceTypeInfo(); + if (info.IsTop()) { + // We have no information on the receiver. + return nullptr; + } else if (!info.IsExact()) { + // We currently only support inlining with known receivers. + // TODO: Remove this check, we should be able to inline final methods + // on unknown receivers. + return nullptr; + } else if (info.GetTypeHandle()->IsInterface()) { + // Statically knowing that the receiver has an interface type cannot + // help us find what is the target method. + return nullptr; + } else if (!resolved_method->GetDeclaringClass()->IsAssignableFrom(info.GetTypeHandle().Get())) { + // The method that we're trying to call is not in the receiver's class or super classes. 
+ return nullptr; + } + + ClassLinker* cl = Runtime::Current()->GetClassLinker(); + size_t pointer_size = cl->GetImagePointerSize(); + if (invoke->IsInvokeInterface()) { + resolved_method = info.GetTypeHandle()->FindVirtualMethodForInterface( + resolved_method, pointer_size); + } else { + DCHECK(invoke->IsInvokeVirtual()); + resolved_method = info.GetTypeHandle()->FindVirtualMethodForVirtual( + resolved_method, pointer_size); + } + + if (resolved_method == nullptr) { + // The information we had on the receiver was not enough to find + // the target method. Since we check above the exact type of the receiver, + // the only reason this can happen is an IncompatibleClassChangeError. + return nullptr; + } else if (resolved_method->IsAbstract()) { + // The information we had on the receiver was not enough to find + // the target method. Since we check above the exact type of the receiver, + // the only reason this can happen is an IncompatibleClassChangeError. + return nullptr; + } else if (IsMethodOrDeclaringClassFinal(resolved_method)) { + // A final method has to be the target method. + return resolved_method; + } else if (info.IsExact()) { + // If we found a method and the receiver's concrete type is statically + // known, we know for sure the target. + return resolved_method; + } else { + // Even if we did find a method, the receiver type was not enough to + // statically find the runtime target. + return nullptr; + } +} + bool HInliner::TryInline(HInvoke* invoke_instruction, uint32_t method_index, InvokeType invoke_type) const { @@ -83,19 +167,34 @@ bool HInliner::TryInline(HInvoke* invoke_instruction, const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile(); VLOG(compiler) << "Try inlining " << PrettyMethod(method_index, caller_dex_file); - StackHandleScope<3> hs(soa.Self()); - Handle<mirror::DexCache> dex_cache( - hs.NewHandle(caller_compilation_unit_.GetClassLinker()->FindDexCache(caller_dex_file))); - Handle<mirror::ClassLoader> class_loader(hs.NewHandle( - soa.Decode<mirror::ClassLoader*>(caller_compilation_unit_.GetClassLoader()))); - ArtMethod* resolved_method(compiler_driver_->ResolveMethod( - soa, dex_cache, class_loader, &caller_compilation_unit_, method_index, invoke_type)); + ArtMethod* resolved_method = nullptr; + { + // Don't keep this handle scope on stack, otherwise we cannot do a reference type + // propagation while inlining. 
+ StackHandleScope<2> hs(soa.Self()); + Handle<mirror::DexCache> dex_cache( + hs.NewHandle(caller_compilation_unit_.GetClassLinker()->FindDexCache(caller_dex_file))); + Handle<mirror::ClassLoader> class_loader(hs.NewHandle( + soa.Decode<mirror::ClassLoader*>(caller_compilation_unit_.GetClassLoader()))); + resolved_method = compiler_driver_->ResolveMethod( + soa, dex_cache, class_loader, &caller_compilation_unit_, method_index, invoke_type); + } if (resolved_method == nullptr) { VLOG(compiler) << "Method cannot be resolved " << PrettyMethod(method_index, caller_dex_file); return false; } + if (!invoke_instruction->IsInvokeStaticOrDirect()) { + resolved_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method); + if (resolved_method == nullptr) { + VLOG(compiler) << "Interface or virtual call to " + << PrettyMethod(method_index, caller_dex_file) + << "could not be statically determined"; + return false; + } + } + bool same_dex_file = true; const DexFile& outer_dex_file = *outer_compilation_unit_.GetDexFile(); if (resolved_method->GetDexFile()->GetLocation().compare(outer_dex_file.GetLocation()) != 0) { @@ -238,11 +337,13 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, // Run simple optimizations on the graph. HDeadCodeElimination dce(callee_graph, stats_); HConstantFolding fold(callee_graph); + ReferenceTypePropagation type_propagation(callee_graph, handles_); InstructionSimplifier simplify(callee_graph, stats_); HOptimization* optimizations[] = { &dce, &fold, + &type_propagation, &simplify, }; @@ -256,6 +357,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, outer_compilation_unit_, dex_compilation_unit, compiler_driver_, + handles_, stats_, depth_ + 1); inliner.Run(); diff --git a/compiler/optimizing/inliner.h b/compiler/optimizing/inliner.h index b86c1ed505..ca713329f5 100644 --- a/compiler/optimizing/inliner.h +++ b/compiler/optimizing/inliner.h @@ -34,13 +34,15 @@ class HInliner : public HOptimization { const DexCompilationUnit& outer_compilation_unit, const DexCompilationUnit& caller_compilation_unit, CompilerDriver* compiler_driver, + StackHandleScopeCollection* handles, OptimizingCompilerStats* stats, size_t depth = 0) : HOptimization(outer_graph, true, kInlinerPassName, stats), outer_compilation_unit_(outer_compilation_unit), caller_compilation_unit_(caller_compilation_unit), compiler_driver_(compiler_driver), - depth_(depth) {} + depth_(depth), + handles_(handles) {} void Run() OVERRIDE; @@ -57,6 +59,7 @@ class HInliner : public HOptimization { const DexCompilationUnit& caller_compilation_unit_; CompilerDriver* const compiler_driver_; const size_t depth_; + StackHandleScopeCollection* const handles_; DISALLOW_COPY_AND_ASSIGN(HInliner); }; diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index f6ef2f7e82..bf0b9fac0f 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -326,7 +326,7 @@ static void RunOptimizations(HGraph* graph, InstructionSimplifier simplify1(graph, stats); HBooleanSimplifier boolean_simplify(graph); - HInliner inliner(graph, dex_compilation_unit, dex_compilation_unit, driver, stats); + HInliner inliner(graph, dex_compilation_unit, dex_compilation_unit, driver, handles, stats); HConstantFolding fold2(graph, "constant_folding_after_inlining"); SideEffectsAnalysis side_effects(graph); @@ -335,6 +335,8 @@ static void RunOptimizations(HGraph* graph, BoundsCheckElimination bce(graph); ReferenceTypePropagation 
type_propagation(graph, handles); InstructionSimplifier simplify2(graph, stats, "instruction_simplifier_after_types"); + InstructionSimplifier simplify3(graph, stats, "last_instruction_simplifier"); + ReferenceTypePropagation type_propagation2(graph, handles); IntrinsicsRecognizer intrinsics(graph, driver); @@ -343,7 +345,12 @@ static void RunOptimizations(HGraph* graph, &dce1, &fold1, &simplify1, + &type_propagation, + &simplify2, &inliner, + // Run another type propagation phase: inlining will open up more opprotunities + // to remove checkast/instanceof and null checks. + &type_propagation2, // BooleanSimplifier depends on the InstructionSimplifier removing redundant // suspend checks to recognize empty blocks. &boolean_simplify, @@ -352,8 +359,7 @@ static void RunOptimizations(HGraph* graph, &gvn, &licm, &bce, - &type_propagation, - &simplify2, + &simplify3, &dce2, }; diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc index 4f1f45769d..4edadef1a4 100644 --- a/compiler/optimizing/reference_type_propagation.cc +++ b/compiler/optimizing/reference_type_propagation.cc @@ -166,31 +166,34 @@ void ReferenceTypePropagation::BoundTypeForIfInstanceOf(HBasicBlock* block) { } } -void ReferenceTypePropagation::SetClassAsTypeInfo(HInstruction* instr, mirror::Class* klass) { +void ReferenceTypePropagation::SetClassAsTypeInfo(HInstruction* instr, + mirror::Class* klass, + bool is_exact) { if (klass != nullptr) { ScopedObjectAccess soa(Thread::Current()); MutableHandle<mirror::Class> handle = handles_->NewHandle(klass); - instr->SetReferenceTypeInfo(ReferenceTypeInfo::Create(handle, true)); + instr->SetReferenceTypeInfo(ReferenceTypeInfo::Create(handle, is_exact)); } } void ReferenceTypePropagation::UpdateReferenceTypeInfo(HInstruction* instr, uint16_t type_idx, - const DexFile& dex_file) { + const DexFile& dex_file, + bool is_exact) { DCHECK_EQ(instr->GetType(), Primitive::kPrimNot); ScopedObjectAccess soa(Thread::Current()); mirror::DexCache* dex_cache = Runtime::Current()->GetClassLinker()->FindDexCache(dex_file); // Get type from dex cache assuming it was populated by the verifier. 
- SetClassAsTypeInfo(instr, dex_cache->GetResolvedType(type_idx)); + SetClassAsTypeInfo(instr, dex_cache->GetResolvedType(type_idx), is_exact); } void ReferenceTypePropagation::VisitNewInstance(HNewInstance* instr) { - UpdateReferenceTypeInfo(instr, instr->GetTypeIndex(), instr->GetDexFile()); + UpdateReferenceTypeInfo(instr, instr->GetTypeIndex(), instr->GetDexFile(), /* is_exact */ true); } void ReferenceTypePropagation::VisitNewArray(HNewArray* instr) { - UpdateReferenceTypeInfo(instr, instr->GetTypeIndex(), instr->GetDexFile()); + UpdateReferenceTypeInfo(instr, instr->GetTypeIndex(), instr->GetDexFile(), /* is_exact */ true); } void ReferenceTypePropagation::UpdateFieldAccessTypeInfo(HInstruction* instr, @@ -206,7 +209,7 @@ void ReferenceTypePropagation::UpdateFieldAccessTypeInfo(HInstruction* instr, ArtField* field = cl->GetResolvedField(info.GetFieldIndex(), dex_cache); DCHECK(field != nullptr); mirror::Class* klass = field->GetType<false>(); - SetClassAsTypeInfo(instr, klass); + SetClassAsTypeInfo(instr, klass, /* is_exact */ false); } void ReferenceTypePropagation::VisitInstanceFieldGet(HInstanceFieldGet* instr) { diff --git a/compiler/optimizing/reference_type_propagation.h b/compiler/optimizing/reference_type_propagation.h index 74e425fb3e..0a1d4c496e 100644 --- a/compiler/optimizing/reference_type_propagation.h +++ b/compiler/optimizing/reference_type_propagation.h @@ -46,14 +46,17 @@ class ReferenceTypePropagation : public HOptimization { void VisitPhi(HPhi* phi); void VisitBasicBlock(HBasicBlock* block); void UpdateFieldAccessTypeInfo(HInstruction* instr, const FieldInfo& info); - void SetClassAsTypeInfo(HInstruction* instr, mirror::Class* klass); + void SetClassAsTypeInfo(HInstruction* instr, mirror::Class* klass, bool is_exact); void UpdateBoundType(HBoundType* bound_type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); void UpdatePhi(HPhi* phi) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_); void BoundTypeForIfNotNull(HBasicBlock* block); void BoundTypeForIfInstanceOf(HBasicBlock* block); - void UpdateReferenceTypeInfo(HInstruction* instr, uint16_t type_idx, const DexFile& dex_file); + void UpdateReferenceTypeInfo(HInstruction* instr, + uint16_t type_idx, + const DexFile& dex_file, + bool is_exact); void VisitInstanceFieldGet(HInstanceFieldGet* instr); void VisitStaticFieldGet(HStaticFieldGet* instr); diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc index 5663e3973d..42b9182d55 100644 --- a/compiler/optimizing/stack_map_stream.cc +++ b/compiler/optimizing/stack_map_stream.cc @@ -133,13 +133,13 @@ size_t StackMapStream::PrepareForFillIn() { stack_mask_size_ = RoundUp(stack_mask_number_of_bits, kBitsPerByte) / kBitsPerByte; inline_info_size_ = ComputeInlineInfoSize(); dex_register_maps_size_ = ComputeDexRegisterMapsSize(); - stack_maps_size_ = stack_maps_.Size() - * StackMap::ComputeStackMapSize(stack_mask_size_, - inline_info_size_, - dex_register_maps_size_, - dex_pc_max_, - native_pc_offset_max_, - register_mask_max_); + stack_map_encoding_ = StackMapEncoding::CreateFromSizes(stack_mask_size_, + inline_info_size_, + dex_register_maps_size_, + dex_pc_max_, + native_pc_offset_max_, + register_mask_max_); + stack_maps_size_ = stack_maps_.Size() * stack_map_encoding_.ComputeStackMapSize(); dex_register_location_catalog_size_ = ComputeDexRegisterLocationCatalogSize(); // Note: use RoundUp to word-size here if you want CodeInfo objects to be word aligned. 
@@ -235,14 +235,9 @@ void StackMapStream::FillIn(MemoryRegion region) { MemoryRegion inline_infos_region = region.Subregion( inline_infos_start_, inline_info_size_); - code_info.SetEncoding(inline_info_size_, - dex_register_maps_size_, - dex_pc_max_, - native_pc_offset_max_, - register_mask_max_); + code_info.SetEncoding(stack_map_encoding_); code_info.SetNumberOfStackMaps(stack_maps_.Size()); - code_info.SetStackMaskSize(stack_mask_size_); - DCHECK_EQ(code_info.GetStackMapsSize(), stack_maps_size_); + DCHECK_EQ(code_info.GetStackMapsSize(code_info.ExtractEncoding()), stack_maps_size_); // Set the Dex register location catalog. code_info.SetNumberOfDexRegisterLocationCatalogEntries(location_catalog_entries_.Size()); @@ -263,26 +258,27 @@ void StackMapStream::FillIn(MemoryRegion region) { uintptr_t next_dex_register_map_offset = 0; uintptr_t next_inline_info_offset = 0; for (size_t i = 0, e = stack_maps_.Size(); i < e; ++i) { - StackMap stack_map = code_info.GetStackMapAt(i); + StackMap stack_map = code_info.GetStackMapAt(i, stack_map_encoding_); StackMapEntry entry = stack_maps_.Get(i); - stack_map.SetDexPc(code_info, entry.dex_pc); - stack_map.SetNativePcOffset(code_info, entry.native_pc_offset); - stack_map.SetRegisterMask(code_info, entry.register_mask); + stack_map.SetDexPc(stack_map_encoding_, entry.dex_pc); + stack_map.SetNativePcOffset(stack_map_encoding_, entry.native_pc_offset); + stack_map.SetRegisterMask(stack_map_encoding_, entry.register_mask); if (entry.sp_mask != nullptr) { - stack_map.SetStackMask(code_info, *entry.sp_mask); + stack_map.SetStackMask(stack_map_encoding_, *entry.sp_mask); } if (entry.num_dex_registers == 0) { // No dex map available. - stack_map.SetDexRegisterMapOffset(code_info, StackMap::kNoDexRegisterMap); + stack_map.SetDexRegisterMapOffset(stack_map_encoding_, StackMap::kNoDexRegisterMap); } else { // Search for an entry with the same dex map. if (entry.same_dex_register_map_as_ != kNoSameDexMapFound) { // If we have a hit reuse the offset. - stack_map.SetDexRegisterMapOffset(code_info, - code_info.GetStackMapAt(entry.same_dex_register_map_as_) - .GetDexRegisterMapOffset(code_info)); + stack_map.SetDexRegisterMapOffset( + stack_map_encoding_, + code_info.GetStackMapAt(entry.same_dex_register_map_as_, stack_map_encoding_) + .GetDexRegisterMapOffset(stack_map_encoding_)); } else { // New dex registers maps should be added to the stack map. MemoryRegion register_region = dex_register_locations_region.Subregion( @@ -291,7 +287,7 @@ void StackMapStream::FillIn(MemoryRegion region) { next_dex_register_map_offset += register_region.size(); DexRegisterMap dex_register_map(register_region); stack_map.SetDexRegisterMapOffset( - code_info, register_region.start() - dex_register_locations_region.start()); + stack_map_encoding_, register_region.start() - dex_register_locations_region.start()); // Set the dex register location. FillInDexRegisterMap(dex_register_map, @@ -311,7 +307,7 @@ void StackMapStream::FillIn(MemoryRegion region) { // Currently relative to the dex register map. 
stack_map.SetInlineDescriptorOffset( - code_info, inline_region.start() - dex_register_locations_region.start()); + stack_map_encoding_, inline_region.start() - dex_register_locations_region.start()); inline_info.SetDepth(entry.inlining_depth); for (size_t depth = 0; depth < entry.inlining_depth; ++depth) { @@ -341,7 +337,7 @@ void StackMapStream::FillIn(MemoryRegion region) { } } else { if (inline_info_size_ != 0) { - stack_map.SetInlineDescriptorOffset(code_info, StackMap::kNoInlineInfo); + stack_map.SetInlineDescriptorOffset(stack_map_encoding_, StackMap::kNoInlineInfo); } } } diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h index 0af983b1bf..274d573350 100644 --- a/compiler/optimizing/stack_map_stream.h +++ b/compiler/optimizing/stack_map_stream.h @@ -171,6 +171,7 @@ class StackMapStream : public ValueObject { StackMapEntry current_entry_; InlineInfoEntry current_inline_info_; + StackMapEncoding stack_map_encoding_; size_t stack_mask_size_; size_t inline_info_size_; size_t dex_register_maps_size_; diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc index 666fb604c0..b4ac1b4d1a 100644 --- a/compiler/optimizing/stack_map_test.cc +++ b/compiler/optimizing/stack_map_test.cc @@ -51,32 +51,33 @@ TEST(StackMapTest, Test1) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(0u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = code_info.ExtractEncoding(); + ASSERT_EQ(0u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(1u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(2u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); // The Dex register location catalog contains: // - one 1-byte short Dex register location, and // - one 5-byte large Dex register location. 
size_t expected_location_catalog_size = 1u + 5u; ASSERT_EQ(expected_location_catalog_size, location_catalog.Size()); - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); - MemoryRegion stack_mask = stack_map.GetStackMask(code_info); + MemoryRegion stack_mask = stack_map.GetStackMask(encoding); ASSERT_TRUE(SameBits(stack_mask, sp_mask)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -86,16 +87,17 @@ TEST(StackMapTest, Test1) { size_t expected_dex_register_map_size = 1u + 1u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstant, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstantLargeValue, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info)); - ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -112,7 +114,7 @@ TEST(StackMapTest, Test1) { ASSERT_EQ(0, location0.GetValue()); ASSERT_EQ(-2, location1.GetValue()); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } TEST(StackMapTest, Test2) { @@ -148,13 +150,14 @@ TEST(StackMapTest, Test2) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(2u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = 
code_info.ExtractEncoding(); + ASSERT_EQ(2u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(2u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(4u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); // The Dex register location catalog contains: // - three 1-byte short Dex register locations, and // - one 5-byte large Dex register location. @@ -163,19 +166,19 @@ TEST(StackMapTest, Test2) { // First stack map. { - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); - - MemoryRegion stack_mask = stack_map.GetStackMask(code_info); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); + + MemoryRegion stack_mask = stack_map.GetStackMask(encoding); ASSERT_TRUE(SameBits(stack_mask, sp_mask1)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -185,16 +188,17 @@ TEST(StackMapTest, Test2) { size_t expected_dex_register_map_size = 1u + 1u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstant, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstantLargeValue, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info)); - ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, 
code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -211,8 +215,8 @@ TEST(StackMapTest, Test2) { ASSERT_EQ(0, location0.GetValue()); ASSERT_EQ(-2, location1.GetValue()); - ASSERT_TRUE(stack_map.HasInlineInfo(code_info)); - InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map); + ASSERT_TRUE(stack_map.HasInlineInfo(encoding)); + InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding); ASSERT_EQ(2u, inline_info.GetDepth()); ASSERT_EQ(82u, inline_info.GetMethodIndexAtDepth(0)); ASSERT_EQ(42u, inline_info.GetMethodIndexAtDepth(1)); @@ -224,19 +228,19 @@ TEST(StackMapTest, Test2) { // Second stack map. { - StackMap stack_map = code_info.GetStackMapAt(1); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u))); - ASSERT_EQ(1u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(128u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0xFFu, stack_map.GetRegisterMask(code_info)); - - MemoryRegion stack_mask = stack_map.GetStackMask(code_info); + StackMap stack_map = code_info.GetStackMapAt(1, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u, encoding))); + ASSERT_EQ(1u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(128u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0xFFu, stack_map.GetRegisterMask(encoding)); + + MemoryRegion stack_mask = stack_map.GetStackMask(encoding); ASSERT_TRUE(SameBits(stack_mask, sp_mask2)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -246,16 +250,18 @@ TEST(StackMapTest, Test2) { size_t expected_dex_register_map_size = 1u + 1u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kInRegister, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInFpuRegister, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInRegister, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInFpuRegister, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(18, dex_register_map.GetMachineRegister(0, number_of_dex_registers, code_info)); - ASSERT_EQ(3, dex_register_map.GetMachineRegister(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(18, dex_register_map.GetMachineRegister( + 0, number_of_dex_registers, code_info, encoding)); + 
ASSERT_EQ(3, dex_register_map.GetMachineRegister( + 1, number_of_dex_registers, code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -272,7 +278,7 @@ TEST(StackMapTest, Test2) { ASSERT_EQ(18, location0.GetValue()); ASSERT_EQ(3, location1.GetValue()); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } } @@ -294,28 +300,29 @@ TEST(StackMapTest, TestNonLiveDexRegisters) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(0u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = code_info.ExtractEncoding(); + ASSERT_EQ(0u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(1u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(1u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); // The Dex register location catalog contains: // - one 5-byte large Dex register location. size_t expected_location_catalog_size = 5u; ASSERT_EQ(expected_location_catalog_size, location_catalog.Size()); - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -325,15 +332,15 @@ TEST(StackMapTest, TestNonLiveDexRegisters) { size_t expected_dex_register_map_size = 1u + 0u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kNone, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstant, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kNone, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstantLargeValue, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kNone, 
dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -350,7 +357,7 @@ TEST(StackMapTest, TestNonLiveDexRegisters) { ASSERT_EQ(0, location0.GetValue()); ASSERT_EQ(-2, location1.GetValue()); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } // Generate a stack map whose dex register offset is @@ -387,6 +394,7 @@ TEST(StackMapTest, DexRegisterMapOffsetOverflow) { stream.FillIn(region); CodeInfo code_info(region); + StackMapEncoding encoding = code_info.ExtractEncoding(); // The location catalog contains two entries (DexRegisterLocation(kConstant, 0) // and DexRegisterLocation(kConstant, 1)), therefore the location catalog index // has a size of 1 bit. @@ -402,20 +410,20 @@ TEST(StackMapTest, DexRegisterMapOffsetOverflow) { // locations (that is, 127 bytes of data). // Hence it has a size of 255 bytes, and therefore... ASSERT_EQ(128u, DexRegisterMap::GetLiveBitMaskSize(number_of_dex_registers)); - StackMap stack_map0 = code_info.GetStackMapAt(0); + StackMap stack_map0 = code_info.GetStackMapAt(0, encoding); DexRegisterMap dex_register_map0 = - code_info.GetDexRegisterMapOf(stack_map0, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map0, encoding, number_of_dex_registers); ASSERT_EQ(127u, dex_register_map0.GetLocationMappingDataSize(number_of_dex_registers, number_of_location_catalog_entries)); ASSERT_EQ(255u, dex_register_map0.Size()); - StackMap stack_map1 = code_info.GetStackMapAt(1); - ASSERT_TRUE(stack_map1.HasDexRegisterMap(code_info)); + StackMap stack_map1 = code_info.GetStackMapAt(1, encoding); + ASSERT_TRUE(stack_map1.HasDexRegisterMap(encoding)); // ...the offset of the second Dex register map (relative to the // beginning of the Dex register maps region) is 255 (i.e., // kNoDexRegisterMapSmallEncoding). - ASSERT_NE(stack_map1.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMap); - ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(code_info), 0xFFu); + ASSERT_NE(stack_map1.GetDexRegisterMapOffset(encoding), StackMap::kNoDexRegisterMap); + ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(encoding), 0xFFu); } TEST(StackMapTest, TestShareDexRegisterMap) { @@ -447,28 +455,30 @@ TEST(StackMapTest, TestShareDexRegisterMap) { stream.FillIn(region); CodeInfo ci(region); + StackMapEncoding encoding = ci.ExtractEncoding(); + // Verify first stack map. - StackMap sm0 = ci.GetStackMapAt(0); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, number_of_dex_registers); - ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci)); - ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci)); + StackMap sm0 = ci.GetStackMapAt(0, encoding); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, encoding, number_of_dex_registers); + ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci, encoding)); + ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci, encoding)); // Verify second stack map. 
- StackMap sm1 = ci.GetStackMapAt(1); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, number_of_dex_registers); - ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci)); - ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci)); + StackMap sm1 = ci.GetStackMapAt(1, encoding); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, encoding, number_of_dex_registers); + ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci, encoding)); + ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci, encoding)); // Verify third stack map. - StackMap sm2 = ci.GetStackMapAt(2); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, number_of_dex_registers); - ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci)); - ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci)); + StackMap sm2 = ci.GetStackMapAt(2, encoding); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, encoding, number_of_dex_registers); + ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci, encoding)); + ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci, encoding)); // Verify dex register map offsets. - ASSERT_EQ(sm0.GetDexRegisterMapOffset(ci), sm1.GetDexRegisterMapOffset(ci)); - ASSERT_NE(sm0.GetDexRegisterMapOffset(ci), sm2.GetDexRegisterMapOffset(ci)); - ASSERT_NE(sm1.GetDexRegisterMapOffset(ci), sm2.GetDexRegisterMapOffset(ci)); + ASSERT_EQ(sm0.GetDexRegisterMapOffset(encoding), sm1.GetDexRegisterMapOffset(encoding)); + ASSERT_NE(sm0.GetDexRegisterMapOffset(encoding), sm2.GetDexRegisterMapOffset(encoding)); + ASSERT_NE(sm1.GetDexRegisterMapOffset(encoding), sm2.GetDexRegisterMapOffset(encoding)); } TEST(StackMapTest, TestNoDexRegisterMap) { @@ -487,24 +497,25 @@ TEST(StackMapTest, TestNoDexRegisterMap) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(0u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = code_info.ExtractEncoding(); + ASSERT_EQ(0u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(1u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(0u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); ASSERT_EQ(0u, location_catalog.Size()); - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); - ASSERT_FALSE(stack_map.HasDexRegisterMap(code_info)); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } TEST(StackMapTest, InlineTest) { @@ 
-579,16 +590,17 @@ TEST(StackMapTest, InlineTest) { stream.FillIn(region); CodeInfo ci(region); + StackMapEncoding encoding = ci.ExtractEncoding(); { // Verify first stack map. - StackMap sm0 = ci.GetStackMapAt(0); + StackMap sm0 = ci.GetStackMapAt(0, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, 2); - ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci)); - ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci)); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, encoding, 2); + ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding)); + ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding)); - InlineInfo if0 = ci.GetInlineInfoOf(sm0); + InlineInfo if0 = ci.GetInlineInfoOf(sm0, encoding); ASSERT_EQ(2u, if0.GetDepth()); ASSERT_EQ(2u, if0.GetDexPcAtDepth(0)); ASSERT_EQ(42u, if0.GetMethodIndexAtDepth(0)); @@ -597,24 +609,24 @@ TEST(StackMapTest, InlineTest) { ASSERT_EQ(82u, if0.GetMethodIndexAtDepth(1)); ASSERT_EQ(kStatic, if0.GetInvokeTypeAtDepth(1)); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, 1); - ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci)); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, encoding, 1); + ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding)); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, 3); - ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci)); - ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci)); - ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci)); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, encoding, 3); + ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci, encoding)); + ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci, encoding)); + ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci, encoding)); } { // Verify second stack map. 
- StackMap sm1 = ci.GetStackMapAt(1); + StackMap sm1 = ci.GetStackMapAt(1, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, 2); - ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci)); - ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci)); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, encoding, 2); + ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding)); + ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding)); - InlineInfo if1 = ci.GetInlineInfoOf(sm1); + InlineInfo if1 = ci.GetInlineInfoOf(sm1, encoding); ASSERT_EQ(3u, if1.GetDepth()); ASSERT_EQ(2u, if1.GetDexPcAtDepth(0)); ASSERT_EQ(42u, if1.GetMethodIndexAtDepth(0)); @@ -626,36 +638,36 @@ TEST(StackMapTest, InlineTest) { ASSERT_EQ(52u, if1.GetMethodIndexAtDepth(2)); ASSERT_EQ(kVirtual, if1.GetInvokeTypeAtDepth(2)); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, 1); - ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci)); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, encoding, 1); + ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding)); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, 3); - ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci)); - ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci)); - ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci)); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, encoding, 3); + ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci, encoding)); + ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci, encoding)); + ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci, encoding)); ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(2)); } { // Verify third stack map. - StackMap sm2 = ci.GetStackMapAt(2); + StackMap sm2 = ci.GetStackMapAt(2, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, 2); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, encoding, 2); ASSERT_FALSE(dex_registers0.IsDexRegisterLive(0)); - ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci)); - ASSERT_FALSE(sm2.HasInlineInfo(ci)); + ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding)); + ASSERT_FALSE(sm2.HasInlineInfo(encoding)); } { // Verify fourth stack map. 
- StackMap sm3 = ci.GetStackMapAt(3); + StackMap sm3 = ci.GetStackMapAt(3, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, 2); - ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci)); - ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci)); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, encoding, 2); + ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding)); + ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding)); - InlineInfo if2 = ci.GetInlineInfoOf(sm3); + InlineInfo if2 = ci.GetInlineInfoOf(sm3, encoding); ASSERT_EQ(3u, if2.GetDepth()); ASSERT_EQ(2u, if2.GetDexPcAtDepth(0)); ASSERT_EQ(42u, if2.GetMethodIndexAtDepth(0)); @@ -669,12 +681,12 @@ TEST(StackMapTest, InlineTest) { ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(0)); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, 1); - ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci)); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, encoding, 1); + ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci, encoding)); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, 2); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, encoding, 2); ASSERT_FALSE(dex_registers2.IsDexRegisterLive(0)); - ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci)); + ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci, encoding)); } } |
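
On the reader side, the stack map change replaces accessors that took the whole CodeInfo (re-deriving field sizes from its header on every call) with accessors that take an explicit StackMapEncoding, extracted once via CodeInfo::ExtractEncoding(); StackMapStream likewise builds the encoding once in PrepareForFillIn() and reuses it in FillIn(). A hedged sketch of the resulting usage pattern, limited to calls that appear in the updated stack_map_test.cc above; the helper itself is hypothetical, the art:: qualification and names are my own, and it assumes compilation inside the ART tree where stack_map.h is available:

    #include "stack_map.h"  // ART runtime stack map reader

    // Count how many stack maps of a method carry live dex register
    // information, decoding the encoding header once up front and threading
    // it through every accessor, which is the pattern the new API expects.
    static size_t CountStackMapsWithLiveDexRegisters(art::CodeInfo code_info,
                                                     uint32_t number_of_dex_registers) {
      art::StackMapEncoding encoding = code_info.ExtractEncoding();  // read header once
      size_t count = 0;
      for (size_t i = 0, e = code_info.GetNumberOfStackMaps(); i < e; ++i) {
        art::StackMap stack_map = code_info.GetStackMapAt(i, encoding);
        if (!stack_map.HasDexRegisterMap(encoding)) {
          continue;  // kNoDexRegisterMap: nothing recorded for this safepoint
        }
        art::DexRegisterMap map =
            code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
        if (map.GetNumberOfLiveDexRegisters(number_of_dex_registers) > 0u) {
          ++count;
        }
      }
      return count;
    }
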