Diffstat (limited to 'compiler/optimizing')
22 files changed, 318 insertions, 339 deletions
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc index dbda63bae4..f98029da03 100644 --- a/compiler/optimizing/builder.cc +++ b/compiler/optimizing/builder.cc @@ -763,11 +763,6 @@ bool HGraphBuilder::BuildInvoke(const Instruction& instruction, } DCHECK_EQ(argument_index, number_of_arguments); - if (invoke->IsInvokeStaticOrDirect()) { - invoke->SetArgumentAt(argument_index, graph_->GetCurrentMethod()); - argument_index++; - } - if (clinit_check_requirement == HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit) { // Add the class initialization check as last input of `invoke`. DCHECK(clinit_check != nullptr); diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc index 049b3e3a40..08c0351eab 100644 --- a/compiler/optimizing/code_generator.cc +++ b/compiler/optimizing/code_generator.cc @@ -292,6 +292,7 @@ void CodeGenerator::CreateCommonInvokeLocationSummary( HInvoke* invoke, InvokeDexCallingConventionVisitor* visitor) { ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena(); LocationSummary* locations = new (allocator) LocationSummary(invoke, LocationSummary::kCall); + locations->AddTemp(visitor->GetMethodLocation()); for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) { HInstruction* input = invoke->InputAt(i); @@ -299,20 +300,6 @@ void CodeGenerator::CreateCommonInvokeLocationSummary( } locations->SetOut(visitor->GetReturnLocation(invoke->GetType())); - - if (invoke->IsInvokeStaticOrDirect()) { - HInvokeStaticOrDirect* call = invoke->AsInvokeStaticOrDirect(); - if (call->IsStringInit()) { - locations->AddTemp(visitor->GetMethodLocation()); - } else if (call->IsRecursive()) { - locations->SetInAt(call->GetCurrentMethodInputIndex(), visitor->GetMethodLocation()); - } else { - locations->AddTemp(visitor->GetMethodLocation()); - locations->SetInAt(call->GetCurrentMethodInputIndex(), Location::RequiresRegister()); - } - } else { - locations->AddTemp(visitor->GetMethodLocation()); - } } void CodeGenerator::BlockIfInRegister(Location location, bool is_out) const { diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index f4544ea70d..f7731069fb 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -1254,10 +1254,6 @@ void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invok IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(), codegen_->GetInstructionSetFeatures()); if (intrinsic.TryDispatch(invoke)) { - LocationSummary* locations = invoke->GetLocations(); - if (locations->CanCall()) { - locations->SetInAt(invoke->GetCurrentMethodInputIndex(), Location::RequiresRegister()); - } return; } @@ -1287,9 +1283,9 @@ void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirec return; } - LocationSummary* locations = invoke->GetLocations(); - codegen_->GenerateStaticOrDirectCall( - invoke, locations->HasTemps() ? 
locations->GetTemp(0) : Location::NoLocation()); + Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>(); + + codegen_->GenerateStaticOrDirectCall(invoke, temp); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); } @@ -1320,8 +1316,12 @@ void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) { Location receiver = locations->InAt(0); uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); // temp = object->GetClass(); - DCHECK(receiver.IsRegister()); - __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset); + if (receiver.IsStackSlot()) { + __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex()); + __ LoadFromOffset(kLoadWord, temp, temp, class_offset); + } else { + __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset); + } codegen_->MaybeRecordImplicitNullCheck(invoke); // temp = temp->GetMethodAt(method_offset); uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset( @@ -4206,7 +4206,9 @@ void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instr } } -void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) { +void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) { + DCHECK_EQ(temp, kArtMethodRegister); + // TODO: Implement all kinds of calls: // 1) boot -> boot // 2) app -> boot @@ -4215,32 +4217,32 @@ void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, // Currently we implement the app -> app logic, which looks up in the resolve cache. if (invoke->IsStringInit()) { - Register reg = temp.AsRegister<Register>(); // temp = thread->string_init_entrypoint - __ LoadFromOffset(kLoadWord, reg, TR, invoke->GetStringInitOffset()); + __ LoadFromOffset(kLoadWord, temp, TR, invoke->GetStringInitOffset()); // LR = temp[offset_of_quick_compiled_code] - __ LoadFromOffset(kLoadWord, LR, reg, + __ LoadFromOffset(kLoadWord, LR, temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset( kArmWordSize).Int32Value()); // LR() __ blx(LR); - } else if (invoke->IsRecursive()) { - __ bl(GetFrameEntryLabel()); } else { - Register current_method = - invoke->GetLocations()->InAt(invoke->GetCurrentMethodInputIndex()).AsRegister<Register>(); - Register reg = temp.AsRegister<Register>(); - // reg = current_method->dex_cache_resolved_methods_; - __ LoadFromOffset( - kLoadWord, reg, current_method, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); - // reg = reg[index_in_cache] - __ LoadFromOffset( - kLoadWord, reg, reg, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex())); - // LR = reg[offset_of_quick_compiled_code] - __ LoadFromOffset(kLoadWord, LR, reg, ArtMethod::EntryPointFromQuickCompiledCodeOffset( - kArmWordSize).Int32Value()); - // LR() - __ blx(LR); + // temp = method; + LoadCurrentMethod(temp); + if (!invoke->IsRecursive()) { + // temp = temp->dex_cache_resolved_methods_; + __ LoadFromOffset( + kLoadWord, temp, temp, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); + // temp = temp[index_in_cache] + __ LoadFromOffset( + kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex())); + // LR = temp[offset_of_quick_compiled_code] + __ LoadFromOffset(kLoadWord, LR, temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset( + kArmWordSize).Int32Value()); + // LR() + __ blx(LR); + } else { + __ bl(GetFrameEntryLabel()); + } } DCHECK(!IsLeafMethod()); diff --git a/compiler/optimizing/code_generator_arm.h 
b/compiler/optimizing/code_generator_arm.h index b871acd0ae..d84f2d3d20 100644 --- a/compiler/optimizing/code_generator_arm.h +++ b/compiler/optimizing/code_generator_arm.h @@ -301,7 +301,7 @@ class CodeGeneratorARM : public CodeGenerator { Label* GetFrameEntryLabel() { return &frame_entry_label_; } - void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp); + void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp); private: // Labels for each block that will be compiled. diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index ac99d56676..2f607f70a3 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -484,7 +484,7 @@ Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type } Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const { - return LocationFrom(kArtMethodRegister); + return LocationFrom(x0); } CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph, @@ -2227,10 +2227,6 @@ void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* inv IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena()); if (intrinsic.TryDispatch(invoke)) { - LocationSummary* locations = invoke->GetLocations(); - if (locations->CanCall()) { - locations->SetInAt(invoke->GetCurrentMethodInputIndex(), Location::RequiresRegister()); - } return; } @@ -2246,8 +2242,9 @@ static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codege return false; } -void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) { +void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) { // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention. + DCHECK(temp.Is(kArtMethodRegister)); size_t index_in_cache = GetCachePointerOffset(invoke->GetDexMethodIndex()); // TODO: Implement all kinds of calls: @@ -2258,30 +2255,30 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invok // Currently we implement the app -> app logic, which looks up in the resolve cache. 
if (invoke->IsStringInit()) { - Register reg = XRegisterFrom(temp); // temp = thread->string_init_entrypoint - __ Ldr(reg.X(), MemOperand(tr, invoke->GetStringInitOffset())); + __ Ldr(temp.X(), MemOperand(tr, invoke->GetStringInitOffset())); // LR = temp->entry_point_from_quick_compiled_code_; __ Ldr(lr, MemOperand( - reg, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value())); + temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value())); // lr() __ Blr(lr); - } else if (invoke->IsRecursive()) { - __ Bl(&frame_entry_label_); } else { - Register current_method = - XRegisterFrom(invoke->GetLocations()->InAt(invoke->GetCurrentMethodInputIndex())); - Register reg = XRegisterFrom(temp); - // temp = current_method->dex_cache_resolved_methods_; - __ Ldr(reg.W(), MemOperand(current_method.X(), - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value())); - // temp = temp[index_in_cache]; - __ Ldr(reg.X(), MemOperand(reg, index_in_cache)); - // lr = temp->entry_point_from_quick_compiled_code_; - __ Ldr(lr, MemOperand(reg.X(), ArtMethod::EntryPointFromQuickCompiledCodeOffset( - kArm64WordSize).Int32Value())); - // lr(); - __ Blr(lr); + // temp = method; + LoadCurrentMethod(temp.X()); + if (!invoke->IsRecursive()) { + // temp = temp->dex_cache_resolved_methods_; + __ Ldr(temp.W(), MemOperand(temp.X(), + ArtMethod::DexCacheResolvedMethodsOffset().Int32Value())); + // temp = temp[index_in_cache]; + __ Ldr(temp.X(), MemOperand(temp, index_in_cache)); + // lr = temp->entry_point_from_quick_compiled_code_; + __ Ldr(lr, MemOperand(temp.X(), ArtMethod::EntryPointFromQuickCompiledCodeOffset( + kArm64WordSize).Int32Value())); + // lr(); + __ Blr(lr); + } else { + __ Bl(&frame_entry_label_); + } } DCHECK(!IsLeafMethod()); @@ -2297,9 +2294,8 @@ void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDir } BlockPoolsScope block_pools(GetVIXLAssembler()); - LocationSummary* locations = invoke->GetLocations(); - codegen_->GenerateStaticOrDirectCall( - invoke, locations->HasTemps() ? 
locations->GetTemp(0) : Location::NoLocation()); + Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0)); + codegen_->GenerateStaticOrDirectCall(invoke, temp); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); } @@ -2318,8 +2314,14 @@ void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) { BlockPoolsScope block_pools(GetVIXLAssembler()); - DCHECK(receiver.IsRegister()); - __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset)); + // temp = object->GetClass(); + if (receiver.IsStackSlot()) { + __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex())); + __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset)); + } else { + DCHECK(receiver.IsRegister()); + __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset)); + } codegen_->MaybeRecordImplicitNullCheck(invoke); // temp = temp->GetMethodAt(method_offset); __ Ldr(temp, MemOperand(temp, method_offset)); @@ -2672,7 +2674,7 @@ void InstructionCodeGeneratorARM64::VisitParameterValue( void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) { LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); - locations->SetOut(LocationFrom(kArtMethodRegister)); + locations->SetOut(LocationFrom(x0)); } void InstructionCodeGeneratorARM64::VisitCurrentMethod( diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h index 32466485ad..c62ba951cd 100644 --- a/compiler/optimizing/code_generator_arm64.h +++ b/compiler/optimizing/code_generator_arm64.h @@ -344,7 +344,7 @@ class CodeGeneratorARM64 : public CodeGenerator { return false; } - void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp); + void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, vixl::Register temp); private: // Labels for each block that will be compiled. diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index 4065c443fd..8a7b52e549 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -1231,10 +1231,6 @@ void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invok IntrinsicLocationsBuilderX86 intrinsic(codegen_); if (intrinsic.TryDispatch(invoke)) { - LocationSummary* locations = invoke->GetLocations(); - if (locations->CanCall()) { - locations->SetInAt(invoke->GetCurrentMethodInputIndex(), Location::RequiresRegister()); - } return; } @@ -1259,9 +1255,8 @@ void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirec return; } - LocationSummary* locations = invoke->GetLocations(); codegen_->GenerateStaticOrDirectCall( - invoke, locations->HasTemps() ? 
locations->GetTemp(0) : Location::NoLocation()); + invoke, invoke->GetLocations()->GetTemp(0).AsRegister<Register>()); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); } @@ -1281,8 +1276,13 @@ void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) { LocationSummary* locations = invoke->GetLocations(); Location receiver = locations->InAt(0); uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); - DCHECK(receiver.IsRegister()); - __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset)); + // temp = object->GetClass(); + if (receiver.IsStackSlot()) { + __ movl(temp, Address(ESP, receiver.GetStackIndex())); + __ movl(temp, Address(temp, class_offset)); + } else { + __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset)); + } codegen_->MaybeRecordImplicitNullCheck(invoke); // temp = temp->GetMethodAt(method_offset); __ movl(temp, Address(temp, method_offset)); @@ -3201,7 +3201,7 @@ void InstructionCodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) { void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, - Location temp) { + Register temp) { // TODO: Implement all kinds of calls: // 1) boot -> boot // 2) app -> boot @@ -3211,26 +3211,25 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, if (invoke->IsStringInit()) { // temp = thread->string_init_entrypoint - Register reg = temp.AsRegister<Register>(); - __ fs()->movl(reg, Address::Absolute(invoke->GetStringInitOffset())); + __ fs()->movl(temp, Address::Absolute(invoke->GetStringInitOffset())); // (temp + offset_of_quick_compiled_code)() __ call(Address( - reg, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value())); - } else if (invoke->IsRecursive()) { - __ call(GetFrameEntryLabel()); + temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value())); } else { - Register current_method = - invoke->GetLocations()->InAt(invoke->GetCurrentMethodInputIndex()).AsRegister<Register>(); - Register reg = temp.AsRegister<Register>(); - // temp = temp->dex_cache_resolved_methods_; - __ movl(reg, Address( - current_method, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value())); - // temp = temp[index_in_cache] - __ movl(reg, Address(reg, - CodeGenerator::GetCachePointerOffset(invoke->GetDexMethodIndex()))); - // (temp + offset_of_quick_compiled_code)() - __ call(Address(reg, - ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value())); + // temp = method; + LoadCurrentMethod(temp); + if (!invoke->IsRecursive()) { + // temp = temp->dex_cache_resolved_methods_; + __ movl(temp, Address(temp, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value())); + // temp = temp[index_in_cache] + __ movl(temp, Address(temp, + CodeGenerator::GetCachePointerOffset(invoke->GetDexMethodIndex()))); + // (temp + offset_of_quick_compiled_code)() + __ call(Address(temp, + ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value())); + } else { + __ call(GetFrameEntryLabel()); + } } DCHECK(!IsLeafMethod()); diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h index b8553d266d..61827a45ab 100644 --- a/compiler/optimizing/code_generator_x86.h +++ b/compiler/optimizing/code_generator_x86.h @@ -263,7 +263,7 @@ class CodeGeneratorX86 : public CodeGenerator { void Move64(Location destination, Location source); // Generate a call to a static or direct method. 
- void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp); + void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp); // Emit a write barrier. void MarkGCCard(Register temp, diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index c9fe813f20..a2a3cf523c 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -360,7 +360,7 @@ inline Condition X86_64Condition(IfCondition cond) { } void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, - Location temp) { + CpuRegister temp) { // All registers are assumed to be correctly set up. // TODO: Implement all kinds of calls: @@ -371,28 +371,26 @@ void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invo // Currently we implement the app -> app logic, which looks up in the resolve cache. if (invoke->IsStringInit()) { - CpuRegister reg = temp.AsRegister<CpuRegister>(); // temp = thread->string_init_entrypoint - __ gs()->movl(reg, Address::Absolute(invoke->GetStringInitOffset())); + __ gs()->movl(temp, Address::Absolute(invoke->GetStringInitOffset())); // (temp + offset_of_quick_compiled_code)() - __ call(Address(reg, ArtMethod::EntryPointFromQuickCompiledCodeOffset( + __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset( kX86_64WordSize).SizeValue())); - } else if (invoke->IsRecursive()) { - __ call(&frame_entry_label_); } else { - LocationSummary* locations = invoke->GetLocations(); - CpuRegister reg = temp.AsRegister<CpuRegister>(); - CpuRegister current_method = - locations->InAt(invoke->GetCurrentMethodInputIndex()).AsRegister<CpuRegister>(); - // temp = temp->dex_cache_resolved_methods_; - __ movl(reg, Address( - current_method, ArtMethod::DexCacheResolvedMethodsOffset().SizeValue())); - // temp = temp[index_in_cache] - __ movq(reg, Address( - reg, CodeGenerator::GetCachePointerOffset(invoke->GetDexMethodIndex()))); - // (temp + offset_of_quick_compiled_code)() - __ call(Address(reg, ArtMethod::EntryPointFromQuickCompiledCodeOffset( - kX86_64WordSize).SizeValue())); + // temp = method; + LoadCurrentMethod(temp); + if (!invoke->IsRecursive()) { + // temp = temp->dex_cache_resolved_methods_; + __ movl(temp, Address(temp, ArtMethod::DexCacheResolvedMethodsOffset().SizeValue())); + // temp = temp[index_in_cache] + __ movq(temp, Address( + temp, CodeGenerator::GetCachePointerOffset(invoke->GetDexMethodIndex()))); + // (temp + offset_of_quick_compiled_code)() + __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset( + kX86_64WordSize).SizeValue())); + } else { + __ call(&frame_entry_label_); + } } DCHECK(!IsLeafMethod()); @@ -1336,10 +1334,6 @@ void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* in IntrinsicLocationsBuilderX86_64 intrinsic(codegen_); if (intrinsic.TryDispatch(invoke)) { - LocationSummary* locations = invoke->GetLocations(); - if (locations->CanCall()) { - locations->SetInAt(invoke->GetCurrentMethodInputIndex(), Location::RequiresRegister()); - } return; } @@ -1364,9 +1358,9 @@ void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDi return; } - LocationSummary* locations = invoke->GetLocations(); codegen_->GenerateStaticOrDirectCall( - invoke, locations->HasTemps() ? 
locations->GetTemp(0) : Location::NoLocation()); + invoke, + invoke->GetLocations()->GetTemp(0).AsRegister<CpuRegister>()); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); } @@ -1396,8 +1390,12 @@ void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) Location receiver = locations->InAt(0); size_t class_offset = mirror::Object::ClassOffset().SizeValue(); // temp = object->GetClass(); - DCHECK(receiver.IsRegister()); - __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset)); + if (receiver.IsStackSlot()) { + __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex())); + __ movl(temp, Address(temp, class_offset)); + } else { + __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset)); + } codegen_->MaybeRecordImplicitNullCheck(invoke); // temp = temp->GetMethodAt(method_offset); __ movq(temp, Address(temp, method_offset)); diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h index 61f863c162..c19e686c10 100644 --- a/compiler/optimizing/code_generator_x86_64.h +++ b/compiler/optimizing/code_generator_x86_64.h @@ -277,7 +277,7 @@ class CodeGeneratorX86_64 : public CodeGenerator { return false; } - void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp); + void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, CpuRegister temp); const X86_64InstructionSetFeatures& GetInstructionSetFeatures() const { return isa_features_; diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc index 749bedf99e..5436ec2dd9 100644 --- a/compiler/optimizing/intrinsics_arm.cc +++ b/compiler/optimizing/intrinsics_arm.cc @@ -101,8 +101,7 @@ class IntrinsicSlowPathARM : public SlowPathCodeARM { MoveArguments(invoke_, codegen); if (invoke_->IsInvokeStaticOrDirect()) { - codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), - Location::RegisterLocation(kArtMethodRegister)); + codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister); RecordPcInfo(codegen, invoke_, invoke_->GetDexPc()); } else { UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented"; diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index c108ad5daa..d1dc5b3843 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -110,8 +110,7 @@ class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 { MoveArguments(invoke_, codegen); if (invoke_->IsInvokeStaticOrDirect()) { - codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), - LocationFrom(kArtMethodRegister)); + codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister); RecordPcInfo(codegen, invoke_, invoke_->GetDexPc()); } else { UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented"; diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index 424ac7c855..5bbbc72020 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ b/compiler/optimizing/intrinsics_x86.cc @@ -138,8 +138,7 @@ class IntrinsicSlowPathX86 : public SlowPathCodeX86 { MoveArguments(invoke_, codegen); if (invoke_->IsInvokeStaticOrDirect()) { - codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), - Location::RegisterLocation(EAX)); + codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), EAX); RecordPcInfo(codegen, invoke_, invoke_->GetDexPc()); } else { 
UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented"; @@ -733,8 +732,7 @@ static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) MoveArguments(invoke, codegen); DCHECK(invoke->IsInvokeStaticOrDirect()); - codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(), - Location::RegisterLocation(EAX)); + codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(), EAX); codegen->RecordPcInfo(invoke, invoke->GetDexPc()); // Copy the result back to the expected output. diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc index 891531435e..d6c90ff510 100644 --- a/compiler/optimizing/intrinsics_x86_64.cc +++ b/compiler/optimizing/intrinsics_x86_64.cc @@ -129,8 +129,7 @@ class IntrinsicSlowPathX86_64 : public SlowPathCodeX86_64 { MoveArguments(invoke_, codegen); if (invoke_->IsInvokeStaticOrDirect()) { - codegen->GenerateStaticOrDirectCall( - invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(RDI)); + codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), CpuRegister(RDI)); RecordPcInfo(codegen, invoke_, invoke_->GetDexPc()); } else { UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented"; @@ -610,8 +609,7 @@ static void InvokeOutOfLineIntrinsic(CodeGeneratorX86_64* codegen, HInvoke* invo MoveArguments(invoke, codegen); DCHECK(invoke->IsInvokeStaticOrDirect()); - codegen->GenerateStaticOrDirectCall( - invoke->AsInvokeStaticOrDirect(), Location::RegisterLocation(RDI)); + codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(), CpuRegister(RDI)); codegen->RecordPcInfo(invoke, invoke->GetDexPc()); // Copy the result back to the expected output. diff --git a/compiler/optimizing/locations.h b/compiler/optimizing/locations.h index 66c5fb1102..09bbb33042 100644 --- a/compiler/optimizing/locations.h +++ b/compiler/optimizing/locations.h @@ -525,8 +525,6 @@ class LocationSummary : public ArenaObject<kArenaAllocMisc> { return temps_.Size(); } - bool HasTemps() const { return !temps_.IsEmpty(); } - Location Out() const { return output_; } bool CanCall() const { return call_kind_ != kNoCall; } diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index d914363688..47927340f4 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -2528,9 +2528,7 @@ class HInvokeStaticOrDirect : public HInvoke { ClinitCheckRequirement clinit_check_requirement) : HInvoke(arena, number_of_arguments, - // There is one extra argument for the HCurrentMethod node, and - // potentially one other if the clinit check is explicit. - clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 2u : 1u, + clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u, return_type, dex_pc, dex_method_index, @@ -2552,7 +2550,6 @@ class HInvokeStaticOrDirect : public HInvoke { bool NeedsDexCache() const OVERRIDE { return !IsRecursive(); } bool IsStringInit() const { return string_init_offset_ != 0; } int32_t GetStringInitOffset() const { return string_init_offset_; } - uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); } // Is this instruction a call to a static method? 
bool IsStatic() const { diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc index e38e49cd19..a381315bac 100644 --- a/compiler/optimizing/register_allocator.cc +++ b/compiler/optimizing/register_allocator.cc @@ -714,15 +714,13 @@ bool RegisterAllocator::TryAllocateFreeReg(LiveInterval* current) { if (defined_by != nullptr && !current->IsSplit()) { LocationSummary* locations = defined_by->GetLocations(); if (!locations->OutputCanOverlapWithInputs() && locations->Out().IsUnallocated()) { - for (size_t i = 0, e = defined_by->InputCount(); i < e; ++i) { + for (HInputIterator it(defined_by); !it.Done(); it.Advance()) { // Take the last interval of the input. It is the location of that interval // that will be used at `defined_by`. - LiveInterval* interval = defined_by->InputAt(i)->GetLiveInterval()->GetLastSibling(); + LiveInterval* interval = it.Current()->GetLiveInterval()->GetLastSibling(); // Note that interval may have not been processed yet. // TODO: Handle non-split intervals last in the work list. - if (locations->InAt(i).IsValid() - && interval->HasRegister() - && interval->SameRegisterKind(*current)) { + if (interval->HasRegister() && interval->SameRegisterKind(*current)) { // The input must be live until the end of `defined_by`, to comply to // the linear scan algorithm. So we use `defined_by`'s end lifetime // position to check whether the input is dead or is inactive after diff --git a/compiler/optimizing/ssa_liveness_analysis.cc b/compiler/optimizing/ssa_liveness_analysis.cc index 701dbb019b..d5f977feec 100644 --- a/compiler/optimizing/ssa_liveness_analysis.cc +++ b/compiler/optimizing/ssa_liveness_analysis.cc @@ -242,7 +242,7 @@ void SsaLivenessAnalysis::ComputeLiveRanges() { HInstruction* input = current->InputAt(i); // Some instructions 'inline' their inputs, that is they do not need // to be materialized. - if (input->HasSsaIndex() && current->GetLocations()->InAt(i).IsValid()) { + if (input->HasSsaIndex()) { live_in->SetBit(input->GetSsaIndex()); input->GetLiveInterval()->AddUse(current, /* environment */ nullptr, i); } diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h index 220ee6a8d0..4667825a62 100644 --- a/compiler/optimizing/ssa_liveness_analysis.h +++ b/compiler/optimizing/ssa_liveness_analysis.h @@ -394,7 +394,7 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> { first_range_->start_ = from; } else { // Instruction without uses. 
- DCHECK(first_use_ == nullptr); + DCHECK(!defined_by_->HasNonEnvironmentUses()); DCHECK(from == defined_by_->GetLifetimePosition()); first_range_ = last_range_ = range_search_start_ = new (allocator_) LiveRange(from, from + 2, nullptr); diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc index 5663e3973d..42b9182d55 100644 --- a/compiler/optimizing/stack_map_stream.cc +++ b/compiler/optimizing/stack_map_stream.cc @@ -133,13 +133,13 @@ size_t StackMapStream::PrepareForFillIn() { stack_mask_size_ = RoundUp(stack_mask_number_of_bits, kBitsPerByte) / kBitsPerByte; inline_info_size_ = ComputeInlineInfoSize(); dex_register_maps_size_ = ComputeDexRegisterMapsSize(); - stack_maps_size_ = stack_maps_.Size() - * StackMap::ComputeStackMapSize(stack_mask_size_, - inline_info_size_, - dex_register_maps_size_, - dex_pc_max_, - native_pc_offset_max_, - register_mask_max_); + stack_map_encoding_ = StackMapEncoding::CreateFromSizes(stack_mask_size_, + inline_info_size_, + dex_register_maps_size_, + dex_pc_max_, + native_pc_offset_max_, + register_mask_max_); + stack_maps_size_ = stack_maps_.Size() * stack_map_encoding_.ComputeStackMapSize(); dex_register_location_catalog_size_ = ComputeDexRegisterLocationCatalogSize(); // Note: use RoundUp to word-size here if you want CodeInfo objects to be word aligned. @@ -235,14 +235,9 @@ void StackMapStream::FillIn(MemoryRegion region) { MemoryRegion inline_infos_region = region.Subregion( inline_infos_start_, inline_info_size_); - code_info.SetEncoding(inline_info_size_, - dex_register_maps_size_, - dex_pc_max_, - native_pc_offset_max_, - register_mask_max_); + code_info.SetEncoding(stack_map_encoding_); code_info.SetNumberOfStackMaps(stack_maps_.Size()); - code_info.SetStackMaskSize(stack_mask_size_); - DCHECK_EQ(code_info.GetStackMapsSize(), stack_maps_size_); + DCHECK_EQ(code_info.GetStackMapsSize(code_info.ExtractEncoding()), stack_maps_size_); // Set the Dex register location catalog. code_info.SetNumberOfDexRegisterLocationCatalogEntries(location_catalog_entries_.Size()); @@ -263,26 +258,27 @@ void StackMapStream::FillIn(MemoryRegion region) { uintptr_t next_dex_register_map_offset = 0; uintptr_t next_inline_info_offset = 0; for (size_t i = 0, e = stack_maps_.Size(); i < e; ++i) { - StackMap stack_map = code_info.GetStackMapAt(i); + StackMap stack_map = code_info.GetStackMapAt(i, stack_map_encoding_); StackMapEntry entry = stack_maps_.Get(i); - stack_map.SetDexPc(code_info, entry.dex_pc); - stack_map.SetNativePcOffset(code_info, entry.native_pc_offset); - stack_map.SetRegisterMask(code_info, entry.register_mask); + stack_map.SetDexPc(stack_map_encoding_, entry.dex_pc); + stack_map.SetNativePcOffset(stack_map_encoding_, entry.native_pc_offset); + stack_map.SetRegisterMask(stack_map_encoding_, entry.register_mask); if (entry.sp_mask != nullptr) { - stack_map.SetStackMask(code_info, *entry.sp_mask); + stack_map.SetStackMask(stack_map_encoding_, *entry.sp_mask); } if (entry.num_dex_registers == 0) { // No dex map available. - stack_map.SetDexRegisterMapOffset(code_info, StackMap::kNoDexRegisterMap); + stack_map.SetDexRegisterMapOffset(stack_map_encoding_, StackMap::kNoDexRegisterMap); } else { // Search for an entry with the same dex map. if (entry.same_dex_register_map_as_ != kNoSameDexMapFound) { // If we have a hit reuse the offset. 
- stack_map.SetDexRegisterMapOffset(code_info, - code_info.GetStackMapAt(entry.same_dex_register_map_as_) - .GetDexRegisterMapOffset(code_info)); + stack_map.SetDexRegisterMapOffset( + stack_map_encoding_, + code_info.GetStackMapAt(entry.same_dex_register_map_as_, stack_map_encoding_) + .GetDexRegisterMapOffset(stack_map_encoding_)); } else { // New dex registers maps should be added to the stack map. MemoryRegion register_region = dex_register_locations_region.Subregion( @@ -291,7 +287,7 @@ void StackMapStream::FillIn(MemoryRegion region) { next_dex_register_map_offset += register_region.size(); DexRegisterMap dex_register_map(register_region); stack_map.SetDexRegisterMapOffset( - code_info, register_region.start() - dex_register_locations_region.start()); + stack_map_encoding_, register_region.start() - dex_register_locations_region.start()); // Set the dex register location. FillInDexRegisterMap(dex_register_map, @@ -311,7 +307,7 @@ void StackMapStream::FillIn(MemoryRegion region) { // Currently relative to the dex register map. stack_map.SetInlineDescriptorOffset( - code_info, inline_region.start() - dex_register_locations_region.start()); + stack_map_encoding_, inline_region.start() - dex_register_locations_region.start()); inline_info.SetDepth(entry.inlining_depth); for (size_t depth = 0; depth < entry.inlining_depth; ++depth) { @@ -341,7 +337,7 @@ void StackMapStream::FillIn(MemoryRegion region) { } } else { if (inline_info_size_ != 0) { - stack_map.SetInlineDescriptorOffset(code_info, StackMap::kNoInlineInfo); + stack_map.SetInlineDescriptorOffset(stack_map_encoding_, StackMap::kNoInlineInfo); } } } diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h index 0af983b1bf..274d573350 100644 --- a/compiler/optimizing/stack_map_stream.h +++ b/compiler/optimizing/stack_map_stream.h @@ -171,6 +171,7 @@ class StackMapStream : public ValueObject { StackMapEntry current_entry_; InlineInfoEntry current_inline_info_; + StackMapEncoding stack_map_encoding_; size_t stack_mask_size_; size_t inline_info_size_; size_t dex_register_maps_size_; diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc index 666fb604c0..b4ac1b4d1a 100644 --- a/compiler/optimizing/stack_map_test.cc +++ b/compiler/optimizing/stack_map_test.cc @@ -51,32 +51,33 @@ TEST(StackMapTest, Test1) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(0u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = code_info.ExtractEncoding(); + ASSERT_EQ(0u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(1u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(2u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); // The Dex register location catalog contains: // - one 1-byte short Dex register location, and // - one 5-byte large Dex register location. 
size_t expected_location_catalog_size = 1u + 5u; ASSERT_EQ(expected_location_catalog_size, location_catalog.Size()); - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); - MemoryRegion stack_mask = stack_map.GetStackMask(code_info); + MemoryRegion stack_mask = stack_map.GetStackMask(encoding); ASSERT_TRUE(SameBits(stack_mask, sp_mask)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -86,16 +87,17 @@ TEST(StackMapTest, Test1) { size_t expected_dex_register_map_size = 1u + 1u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstant, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstantLargeValue, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info)); - ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -112,7 +114,7 @@ TEST(StackMapTest, Test1) { ASSERT_EQ(0, location0.GetValue()); ASSERT_EQ(-2, location1.GetValue()); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } TEST(StackMapTest, Test2) { @@ -148,13 +150,14 @@ TEST(StackMapTest, Test2) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(2u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = 
code_info.ExtractEncoding(); + ASSERT_EQ(2u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(2u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(4u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); // The Dex register location catalog contains: // - three 1-byte short Dex register locations, and // - one 5-byte large Dex register location. @@ -163,19 +166,19 @@ TEST(StackMapTest, Test2) { // First stack map. { - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); - - MemoryRegion stack_mask = stack_map.GetStackMask(code_info); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); + + MemoryRegion stack_mask = stack_map.GetStackMask(encoding); ASSERT_TRUE(SameBits(stack_mask, sp_mask1)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -185,16 +188,17 @@ TEST(StackMapTest, Test2) { size_t expected_dex_register_map_size = 1u + 1u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstant, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInStack, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstantLargeValue, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info)); - ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, 
code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -211,8 +215,8 @@ TEST(StackMapTest, Test2) { ASSERT_EQ(0, location0.GetValue()); ASSERT_EQ(-2, location1.GetValue()); - ASSERT_TRUE(stack_map.HasInlineInfo(code_info)); - InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map); + ASSERT_TRUE(stack_map.HasInlineInfo(encoding)); + InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding); ASSERT_EQ(2u, inline_info.GetDepth()); ASSERT_EQ(82u, inline_info.GetMethodIndexAtDepth(0)); ASSERT_EQ(42u, inline_info.GetMethodIndexAtDepth(1)); @@ -224,19 +228,19 @@ TEST(StackMapTest, Test2) { // Second stack map. { - StackMap stack_map = code_info.GetStackMapAt(1); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u))); - ASSERT_EQ(1u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(128u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0xFFu, stack_map.GetRegisterMask(code_info)); - - MemoryRegion stack_mask = stack_map.GetStackMask(code_info); + StackMap stack_map = code_info.GetStackMapAt(1, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u, encoding))); + ASSERT_EQ(1u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(128u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0xFFu, stack_map.GetRegisterMask(encoding)); + + MemoryRegion stack_mask = stack_map.GetStackMask(encoding); ASSERT_TRUE(SameBits(stack_mask, sp_mask2)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -246,16 +250,18 @@ TEST(StackMapTest, Test2) { size_t expected_dex_register_map_size = 1u + 1u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kInRegister, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInFpuRegister, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInRegister, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kInFpuRegister, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(18, dex_register_map.GetMachineRegister(0, number_of_dex_registers, code_info)); - ASSERT_EQ(3, dex_register_map.GetMachineRegister(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(18, dex_register_map.GetMachineRegister( + 0, number_of_dex_registers, code_info, encoding)); + 
ASSERT_EQ(3, dex_register_map.GetMachineRegister( + 1, number_of_dex_registers, code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -272,7 +278,7 @@ TEST(StackMapTest, Test2) { ASSERT_EQ(18, location0.GetValue()); ASSERT_EQ(3, location1.GetValue()); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } } @@ -294,28 +300,29 @@ TEST(StackMapTest, TestNonLiveDexRegisters) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(0u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = code_info.ExtractEncoding(); + ASSERT_EQ(0u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(1u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(1u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); // The Dex register location catalog contains: // - one 5-byte large Dex register location. size_t expected_location_catalog_size = 5u; ASSERT_EQ(expected_location_catalog_size, location_catalog.Size()); - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); - ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info)); + ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding)); DexRegisterMap dex_register_map = - code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers); ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0)); ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1)); ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers)); @@ -325,15 +332,15 @@ TEST(StackMapTest, TestNonLiveDexRegisters) { size_t expected_dex_register_map_size = 1u + 0u; ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size()); - ASSERT_EQ(Kind::kNone, - dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstant, - dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kNone, - dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info)); - ASSERT_EQ(Kind::kConstantLargeValue, - dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info)); - ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info)); + ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kNone, 
dex_register_map.GetLocationInternalKind( + 0, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind( + 1, number_of_dex_registers, code_info, encoding)); + ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding)); size_t index0 = dex_register_map.GetLocationCatalogEntryIndex( 0, number_of_dex_registers, number_of_location_catalog_entries); @@ -350,7 +357,7 @@ TEST(StackMapTest, TestNonLiveDexRegisters) { ASSERT_EQ(0, location0.GetValue()); ASSERT_EQ(-2, location1.GetValue()); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } // Generate a stack map whose dex register offset is @@ -387,6 +394,7 @@ TEST(StackMapTest, DexRegisterMapOffsetOverflow) { stream.FillIn(region); CodeInfo code_info(region); + StackMapEncoding encoding = code_info.ExtractEncoding(); // The location catalog contains two entries (DexRegisterLocation(kConstant, 0) // and DexRegisterLocation(kConstant, 1)), therefore the location catalog index // has a size of 1 bit. @@ -402,20 +410,20 @@ TEST(StackMapTest, DexRegisterMapOffsetOverflow) { // locations (that is, 127 bytes of data). // Hence it has a size of 255 bytes, and therefore... ASSERT_EQ(128u, DexRegisterMap::GetLiveBitMaskSize(number_of_dex_registers)); - StackMap stack_map0 = code_info.GetStackMapAt(0); + StackMap stack_map0 = code_info.GetStackMapAt(0, encoding); DexRegisterMap dex_register_map0 = - code_info.GetDexRegisterMapOf(stack_map0, number_of_dex_registers); + code_info.GetDexRegisterMapOf(stack_map0, encoding, number_of_dex_registers); ASSERT_EQ(127u, dex_register_map0.GetLocationMappingDataSize(number_of_dex_registers, number_of_location_catalog_entries)); ASSERT_EQ(255u, dex_register_map0.Size()); - StackMap stack_map1 = code_info.GetStackMapAt(1); - ASSERT_TRUE(stack_map1.HasDexRegisterMap(code_info)); + StackMap stack_map1 = code_info.GetStackMapAt(1, encoding); + ASSERT_TRUE(stack_map1.HasDexRegisterMap(encoding)); // ...the offset of the second Dex register map (relative to the // beginning of the Dex register maps region) is 255 (i.e., // kNoDexRegisterMapSmallEncoding). - ASSERT_NE(stack_map1.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMap); - ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(code_info), 0xFFu); + ASSERT_NE(stack_map1.GetDexRegisterMapOffset(encoding), StackMap::kNoDexRegisterMap); + ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(encoding), 0xFFu); } TEST(StackMapTest, TestShareDexRegisterMap) { @@ -447,28 +455,30 @@ TEST(StackMapTest, TestShareDexRegisterMap) { stream.FillIn(region); CodeInfo ci(region); + StackMapEncoding encoding = ci.ExtractEncoding(); + // Verify first stack map. - StackMap sm0 = ci.GetStackMapAt(0); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, number_of_dex_registers); - ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci)); - ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci)); + StackMap sm0 = ci.GetStackMapAt(0, encoding); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, encoding, number_of_dex_registers); + ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci, encoding)); + ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci, encoding)); // Verify second stack map. 
- StackMap sm1 = ci.GetStackMapAt(1); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, number_of_dex_registers); - ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci)); - ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci)); + StackMap sm1 = ci.GetStackMapAt(1, encoding); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, encoding, number_of_dex_registers); + ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci, encoding)); + ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci, encoding)); // Verify third stack map. - StackMap sm2 = ci.GetStackMapAt(2); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, number_of_dex_registers); - ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci)); - ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci)); + StackMap sm2 = ci.GetStackMapAt(2, encoding); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, encoding, number_of_dex_registers); + ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci, encoding)); + ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci, encoding)); // Verify dex register map offsets. - ASSERT_EQ(sm0.GetDexRegisterMapOffset(ci), sm1.GetDexRegisterMapOffset(ci)); - ASSERT_NE(sm0.GetDexRegisterMapOffset(ci), sm2.GetDexRegisterMapOffset(ci)); - ASSERT_NE(sm1.GetDexRegisterMapOffset(ci), sm2.GetDexRegisterMapOffset(ci)); + ASSERT_EQ(sm0.GetDexRegisterMapOffset(encoding), sm1.GetDexRegisterMapOffset(encoding)); + ASSERT_NE(sm0.GetDexRegisterMapOffset(encoding), sm2.GetDexRegisterMapOffset(encoding)); + ASSERT_NE(sm1.GetDexRegisterMapOffset(encoding), sm2.GetDexRegisterMapOffset(encoding)); } TEST(StackMapTest, TestNoDexRegisterMap) { @@ -487,24 +497,25 @@ TEST(StackMapTest, TestNoDexRegisterMap) { stream.FillIn(region); CodeInfo code_info(region); - ASSERT_EQ(0u, code_info.GetStackMaskSize()); + StackMapEncoding encoding = code_info.ExtractEncoding(); + ASSERT_EQ(0u, encoding.NumberOfBytesForStackMask()); ASSERT_EQ(1u, code_info.GetNumberOfStackMaps()); uint32_t number_of_location_catalog_entries = code_info.GetNumberOfDexRegisterLocationCatalogEntries(); ASSERT_EQ(0u, number_of_location_catalog_entries); - DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(); + DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding); ASSERT_EQ(0u, location_catalog.Size()); - StackMap stack_map = code_info.GetStackMapAt(0); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0))); - ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64))); - ASSERT_EQ(0u, stack_map.GetDexPc(code_info)); - ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info)); - ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info)); + StackMap stack_map = code_info.GetStackMapAt(0, encoding); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding))); + ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding))); + ASSERT_EQ(0u, stack_map.GetDexPc(encoding)); + ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding)); + ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding)); - ASSERT_FALSE(stack_map.HasDexRegisterMap(code_info)); - ASSERT_FALSE(stack_map.HasInlineInfo(code_info)); + ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding)); + ASSERT_FALSE(stack_map.HasInlineInfo(encoding)); } TEST(StackMapTest, InlineTest) { @@ 
-579,16 +590,17 @@ TEST(StackMapTest, InlineTest) { stream.FillIn(region); CodeInfo ci(region); + StackMapEncoding encoding = ci.ExtractEncoding(); { // Verify first stack map. - StackMap sm0 = ci.GetStackMapAt(0); + StackMap sm0 = ci.GetStackMapAt(0, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, 2); - ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci)); - ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci)); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, encoding, 2); + ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding)); + ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding)); - InlineInfo if0 = ci.GetInlineInfoOf(sm0); + InlineInfo if0 = ci.GetInlineInfoOf(sm0, encoding); ASSERT_EQ(2u, if0.GetDepth()); ASSERT_EQ(2u, if0.GetDexPcAtDepth(0)); ASSERT_EQ(42u, if0.GetMethodIndexAtDepth(0)); @@ -597,24 +609,24 @@ TEST(StackMapTest, InlineTest) { ASSERT_EQ(82u, if0.GetMethodIndexAtDepth(1)); ASSERT_EQ(kStatic, if0.GetInvokeTypeAtDepth(1)); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, 1); - ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci)); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, encoding, 1); + ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding)); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, 3); - ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci)); - ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci)); - ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci)); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, encoding, 3); + ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci, encoding)); + ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci, encoding)); + ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci, encoding)); } { // Verify second stack map. 
- StackMap sm1 = ci.GetStackMapAt(1); + StackMap sm1 = ci.GetStackMapAt(1, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, 2); - ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci)); - ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci)); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, encoding, 2); + ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding)); + ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding)); - InlineInfo if1 = ci.GetInlineInfoOf(sm1); + InlineInfo if1 = ci.GetInlineInfoOf(sm1, encoding); ASSERT_EQ(3u, if1.GetDepth()); ASSERT_EQ(2u, if1.GetDexPcAtDepth(0)); ASSERT_EQ(42u, if1.GetMethodIndexAtDepth(0)); @@ -626,36 +638,36 @@ TEST(StackMapTest, InlineTest) { ASSERT_EQ(52u, if1.GetMethodIndexAtDepth(2)); ASSERT_EQ(kVirtual, if1.GetInvokeTypeAtDepth(2)); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, 1); - ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci)); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, encoding, 1); + ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding)); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, 3); - ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci)); - ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci)); - ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci)); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, encoding, 3); + ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci, encoding)); + ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci, encoding)); + ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci, encoding)); ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(2)); } { // Verify third stack map. - StackMap sm2 = ci.GetStackMapAt(2); + StackMap sm2 = ci.GetStackMapAt(2, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, 2); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, encoding, 2); ASSERT_FALSE(dex_registers0.IsDexRegisterLive(0)); - ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci)); - ASSERT_FALSE(sm2.HasInlineInfo(ci)); + ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding)); + ASSERT_FALSE(sm2.HasInlineInfo(encoding)); } { // Verify fourth stack map. 
- StackMap sm3 = ci.GetStackMapAt(3); + StackMap sm3 = ci.GetStackMapAt(3, encoding); - DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, 2); - ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci)); - ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci)); + DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, encoding, 2); + ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding)); + ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding)); - InlineInfo if2 = ci.GetInlineInfoOf(sm3); + InlineInfo if2 = ci.GetInlineInfoOf(sm3, encoding); ASSERT_EQ(3u, if2.GetDepth()); ASSERT_EQ(2u, if2.GetDexPcAtDepth(0)); ASSERT_EQ(42u, if2.GetMethodIndexAtDepth(0)); @@ -669,12 +681,12 @@ TEST(StackMapTest, InlineTest) { ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(0)); - DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, 1); - ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci)); + DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, encoding, 1); + ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci, encoding)); - DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, 2); + DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, encoding, 2); ASSERT_FALSE(dex_registers2.IsDexRegisterLive(0)); - ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci)); + ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci, encoding)); } } |
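Reader's note on the invoke hunks above: after this change every back end's GenerateStaticOrDirectCall takes a concrete temporary register rather than a Location, and loads the current ArtMethod itself instead of consuming it as an extra HInvokeStaticOrDirect input. The following is only a paraphrase of the ARM hunks in code_generator_arm.cc (same names as the patch), gathered in one place for readability; it is not additional code in the tree:

  // Caller side, InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect:
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  codegen_->GenerateStaticOrDirectCall(invoke, temp);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());

  // Callee side, the non-string-init, non-recursive case:
  LoadCurrentMethod(temp);  // temp = caller's ArtMethod*
  __ LoadFromOffset(kLoadWord, temp, temp,
                    ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, temp, temp,
                    CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
  __ LoadFromOffset(kLoadWord, LR, temp,
                    ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmWordSize).Int32Value());
  __ blx(LR);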
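The stack map hunks thread a StackMapEncoding, computed once in StackMapStream::PrepareForFillIn and recovered by readers via CodeInfo::ExtractEncoding(), through the CodeInfo/StackMap/DexRegisterMap accessors. A minimal reader-side sketch, using only calls that appear in the updated stack_map_test.cc above and assuming a MemoryRegion `region` filled by StackMapStream::FillIn and a known `number_of_dex_registers` (both illustrative, set up as in those tests):

  CodeInfo code_info(region);
  StackMapEncoding encoding = code_info.ExtractEncoding();  // decode field sizes once
  for (size_t i = 0, e = code_info.GetNumberOfStackMaps(); i < e; ++i) {
    StackMap stack_map = code_info.GetStackMapAt(i, encoding);
    uint32_t dex_pc = stack_map.GetDexPc(encoding);
    uint32_t native_pc = stack_map.GetNativePcOffset(encoding);
    if (stack_map.HasDexRegisterMap(encoding)) {
      DexRegisterMap map =
          code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
      // Per-register queries now also take the encoding, e.g.:
      // map.GetLocationKind(reg, number_of_dex_registers, code_info, encoding);
    }
  }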