Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc            |   5
-rw-r--r--  compiler/optimizing/code_generator.h             |   1
-rw-r--r--  compiler/optimizing/data_type.h                  |  15
-rw-r--r--  compiler/optimizing/instruction_builder.cc       | 193
-rw-r--r--  compiler/optimizing/instruction_builder.h        |  39
-rw-r--r--  compiler/optimizing/load_store_analysis.h        |  59
-rw-r--r--  compiler/optimizing/load_store_analysis_test.cc  | 119
-rw-r--r--  compiler/optimizing/load_store_elimination.cc    | 119
-rw-r--r--  compiler/optimizing/nodes.h                      |   8
-rw-r--r--  compiler/optimizing/optimizing_compiler_stats.h  |   1
-rw-r--r--  compiler/optimizing/scheduler.cc                 |  20
-rw-r--r--  compiler/optimizing/scheduler.h                  |   8
-rw-r--r--  compiler/optimizing/scheduler_test.cc            |  24
-rw-r--r--  compiler/optimizing/stack_map_stream.cc          | 405
-rw-r--r--  compiler/optimizing/stack_map_stream.h           |  41
-rw-r--r--  compiler/optimizing/stack_map_test.cc            | 545
16 files changed, 675 insertions, 927 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index f57333741c..de1be5b871 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -771,7 +771,7 @@ void CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(
void CodeGenerator::GenerateLoadMethodTypeRuntimeCall(HLoadMethodType* method_type) {
LocationSummary* locations = method_type->GetLocations();
- MoveConstant(locations->GetTemp(0), method_type->GetProtoIndex());
+ MoveConstant(locations->GetTemp(0), method_type->GetProtoIndex().index_);
CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
InvokeRuntime(kQuickResolveMethodType, method_type, method_type->GetDexPc());
}
@@ -975,11 +975,10 @@ static void CheckCovers(uint32_t dex_pc,
const CodeInfo& code_info,
const ArenaVector<HSuspendCheck*>& loop_headers,
ArenaVector<size_t>* covered) {
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
for (size_t i = 0; i < loop_headers.size(); ++i) {
if (loop_headers[i]->GetDexPc() == dex_pc) {
if (graph.IsCompilingOsr()) {
- DCHECK(code_info.GetOsrStackMapForDexPc(dex_pc, encoding).IsValid());
+ DCHECK(code_info.GetOsrStackMapForDexPc(dex_pc).IsValid());
}
++(*covered)[i];
}
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index bcb25997f4..a340446ac3 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -35,7 +35,6 @@
#include "optimizing_compiler_stats.h"
#include "read_barrier_option.h"
#include "stack.h"
-#include "stack_map.h"
#include "utils/label.h"
namespace art {
diff --git a/compiler/optimizing/data_type.h b/compiler/optimizing/data_type.h
index be26e67af3..5ac6e46003 100644
--- a/compiler/optimizing/data_type.h
+++ b/compiler/optimizing/data_type.h
@@ -216,6 +216,21 @@ class DataType {
Size(result_type) > Size(input_type);
}
+ static Type ToSigned(Type type) {
+ switch (type) {
+ case Type::kUint8:
+ return Type::kInt8;
+ case Type::kUint16:
+ return Type::kInt16;
+ case Type::kUint32:
+ return Type::kInt32;
+ case Type::kUint64:
+ return Type::kInt64;
+ default:
+ return type;
+ }
+ }
+
static const char* PrettyDescriptor(Type type);
private:
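
For reference, a minimal standalone sketch of the signedness normalization the new DataType::ToSigned() provides (a trimmed enum rather than ART's DataType class); the load_store_analysis.h change further down relies on it to fold unsigned and signed accesses of the same width onto one heap-location type:

  // Standalone C++ sketch, not the ART sources.
  #include <cassert>

  enum class Type { kInt8, kUint8, kInt16, kUint16, kInt32, kUint32, kInt64, kUint64, kFloat32 };

  // Mirrors the mapping added above: unsigned integral types collapse onto
  // their signed counterparts, everything else passes through unchanged.
  constexpr Type ToSigned(Type type) {
    switch (type) {
      case Type::kUint8:  return Type::kInt8;
      case Type::kUint16: return Type::kInt16;
      case Type::kUint32: return Type::kInt32;
      case Type::kUint64: return Type::kInt64;
      default:            return type;
    }
  }

  int main() {
    // A Uint8-typed access (e.g. the result of `a[i] & 0xff`) and an Int8-typed
    // access normalize to the same key, so they can share a heap location.
    assert(ToSigned(Type::kUint8) == ToSigned(Type::kInt8));
    assert(ToSigned(Type::kFloat32) == Type::kFloat32);
    return 0;
  }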
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 35a39456a2..24dc2ee9b4 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -448,11 +448,9 @@ void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
invoke_type,
target_method,
HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
+ RangeInstructionOperands operands(graph_->GetNumberOfVRegs() - in_vregs, in_vregs);
HandleInvoke(invoke,
- in_vregs,
- /* args */ nullptr,
- graph_->GetNumberOfVRegs() - in_vregs,
- /* is_range */ true,
+ operands,
dex_file_->GetMethodShorty(method_idx),
/* clinit_check */ nullptr,
/* is_unresolved */ false);
@@ -916,10 +914,7 @@ static bool IsStringConstructor(ArtMethod* method) {
bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
uint32_t dex_pc,
uint32_t method_idx,
- uint32_t number_of_vreg_arguments,
- bool is_range,
- uint32_t* args,
- uint32_t register_index) {
+ const InstructionOperands& operands) {
InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode());
const char* descriptor = dex_file_->GetMethodShorty(method_idx);
DataType::Type return_type = DataType::FromShorty(descriptor[0]);
@@ -943,12 +938,9 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
method_idx,
invoke_type);
return HandleInvoke(invoke,
- number_of_vreg_arguments,
- args,
- register_index,
- is_range,
+ operands,
descriptor,
- nullptr, /* clinit_check */
+ nullptr /* clinit_check */,
true /* is_unresolved */);
}
@@ -976,12 +968,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
invoke_type,
target_method,
HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
- return HandleStringInit(invoke,
- number_of_vreg_arguments,
- args,
- register_index,
- is_range,
- descriptor);
+ return HandleStringInit(invoke, operands, descriptor);
}
// Potential class initialization check, in the case of a static method call.
@@ -1042,26 +1029,16 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
ImTable::GetImtIndex(resolved_method));
}
- return HandleInvoke(invoke,
- number_of_vreg_arguments,
- args,
- register_index,
- is_range,
- descriptor,
- clinit_check,
- false /* is_unresolved */);
+ return HandleInvoke(invoke, operands, descriptor, clinit_check, false /* is_unresolved */);
}
bool HInstructionBuilder::BuildInvokePolymorphic(const Instruction& instruction ATTRIBUTE_UNUSED,
uint32_t dex_pc,
uint32_t method_idx,
- uint32_t proto_idx,
- uint32_t number_of_vreg_arguments,
- bool is_range,
- uint32_t* args,
- uint32_t register_index) {
+ dex::ProtoIndex proto_idx,
+ const InstructionOperands& operands) {
const char* descriptor = dex_file_->GetShorty(proto_idx);
- DCHECK_EQ(1 + ArtMethod::NumArgRegisters(descriptor), number_of_vreg_arguments);
+ DCHECK_EQ(1 + ArtMethod::NumArgRegisters(descriptor), operands.GetNumberOfOperands());
DataType::Type return_type = DataType::FromShorty(descriptor[0]);
size_t number_of_arguments = strlen(descriptor);
HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
@@ -1070,10 +1047,7 @@ bool HInstructionBuilder::BuildInvokePolymorphic(const Instruction& instruction
dex_pc,
method_idx);
return HandleInvoke(invoke,
- number_of_vreg_arguments,
- args,
- register_index,
- is_range,
+ operands,
descriptor,
nullptr /* clinit_check */,
false /* is_unresolved */);
@@ -1222,26 +1196,22 @@ HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
}
bool HInstructionBuilder::SetupInvokeArguments(HInvoke* invoke,
- uint32_t number_of_vreg_arguments,
- uint32_t* args,
- uint32_t register_index,
- bool is_range,
+ const InstructionOperands& operands,
const char* descriptor,
size_t start_index,
size_t* argument_index) {
uint32_t descriptor_index = 1; // Skip the return type.
-
+ const size_t number_of_operands = operands.GetNumberOfOperands();
for (size_t i = start_index;
// Make sure we don't go over the expected arguments or over the number of
// dex registers given. If the instruction was seen as dead by the verifier,
// it hasn't been properly checked.
- (i < number_of_vreg_arguments) && (*argument_index < invoke->GetNumberOfArguments());
+ (i < number_of_operands) && (*argument_index < invoke->GetNumberOfArguments());
i++, (*argument_index)++) {
DataType::Type type = DataType::FromShorty(descriptor[descriptor_index++]);
bool is_wide = (type == DataType::Type::kInt64) || (type == DataType::Type::kFloat64);
- if (!is_range
- && is_wide
- && ((i + 1 == number_of_vreg_arguments) || (args[i] + 1 != args[i + 1]))) {
+ if (is_wide && ((i + 1 == number_of_operands) ||
+ (operands.GetOperand(i) + 1 != operands.GetOperand(i + 1)))) {
// Longs and doubles should be in pairs, that is, sequential registers. The verifier should
// reject any class where this is violated. However, the verifier only does these checks
// on non trivially dead instructions, so we just bailout the compilation.
@@ -1252,7 +1222,7 @@ bool HInstructionBuilder::SetupInvokeArguments(HInvoke* invoke,
MethodCompilationStat::kNotCompiledMalformedOpcode);
return false;
}
- HInstruction* arg = LoadLocal(is_range ? register_index + i : args[i], type);
+ HInstruction* arg = LoadLocal(operands.GetOperand(i), type);
invoke->SetArgumentAt(*argument_index, arg);
if (is_wide) {
i++;
@@ -1279,10 +1249,7 @@ bool HInstructionBuilder::SetupInvokeArguments(HInvoke* invoke,
}
bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
- uint32_t number_of_vreg_arguments,
- uint32_t* args,
- uint32_t register_index,
- bool is_range,
+ const InstructionOperands& operands,
const char* descriptor,
HClinitCheck* clinit_check,
bool is_unresolved) {
@@ -1291,7 +1258,7 @@ bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
size_t start_index = 0;
size_t argument_index = 0;
if (invoke->GetInvokeType() != InvokeType::kStatic) { // Instance call.
- uint32_t obj_reg = is_range ? register_index : args[0];
+ uint32_t obj_reg = operands.GetOperand(0);
HInstruction* arg = is_unresolved
? LoadLocal(obj_reg, DataType::Type::kReference)
: LoadNullCheckedLocal(obj_reg, invoke->GetDexPc());
@@ -1300,14 +1267,7 @@ bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
argument_index = 1;
}
- if (!SetupInvokeArguments(invoke,
- number_of_vreg_arguments,
- args,
- register_index,
- is_range,
- descriptor,
- start_index,
- &argument_index)) {
+ if (!SetupInvokeArguments(invoke, operands, descriptor, start_index, &argument_index)) {
return false;
}
@@ -1327,24 +1287,14 @@ bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
}
bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
- uint32_t number_of_vreg_arguments,
- uint32_t* args,
- uint32_t register_index,
- bool is_range,
+ const InstructionOperands& operands,
const char* descriptor) {
DCHECK(invoke->IsInvokeStaticOrDirect());
DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit());
size_t start_index = 1;
size_t argument_index = 0;
- if (!SetupInvokeArguments(invoke,
- number_of_vreg_arguments,
- args,
- register_index,
- is_range,
- descriptor,
- start_index,
- &argument_index)) {
+ if (!SetupInvokeArguments(invoke, operands, descriptor, start_index, &argument_index)) {
return false;
}
@@ -1352,7 +1302,7 @@ bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
// This is a StringFactory call, not an actual String constructor. Its result
// replaces the empty String pre-allocated by NewInstance.
- uint32_t orig_this_reg = is_range ? register_index : args[0];
+ uint32_t orig_this_reg = operands.GetOperand(0);
HInstruction* arg_this = LoadLocal(orig_this_reg, DataType::Type::kReference);
// Replacing the NewInstance might render it redundant. Keep a list of these
@@ -1360,9 +1310,15 @@ bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
if (arg_this->IsNewInstance()) {
ssa_builder_->AddUninitializedString(arg_this->AsNewInstance());
} else {
+ // The only reason a HPhi can flow in a String.<init> is when there is an
+ // irreducible loop, which will create HPhi for all dex registers at loop entry.
DCHECK(arg_this->IsPhi());
- // NewInstance is not the direct input of the StringFactory call. It might
- // be redundant but optimizing this case is not worth the effort.
+ DCHECK(graph_->HasIrreducibleLoops());
+ // Don't bother compiling a method in that situation. While we could look at all
+ // phis related to the HNewInstance, it's not worth the trouble.
+ MaybeRecordStat(compilation_stats_,
+ MethodCompilationStat::kNotCompiledIrreducibleAndStringInit);
+ return false;
}
// Walk over all vregs and replace any occurrence of `arg_this` with `invoke`.
@@ -1699,11 +1655,9 @@ void HInstructionBuilder::BuildArrayAccess(const Instruction& instruction,
HNewArray* HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
dex::TypeIndex type_index,
- uint32_t number_of_vreg_arguments,
- bool is_range,
- uint32_t* args,
- uint32_t register_index) {
- HInstruction* length = graph_->GetIntConstant(number_of_vreg_arguments, dex_pc);
+ const InstructionOperands& operands) {
+ const size_t number_of_operands = operands.GetNumberOfOperands();
+ HInstruction* length = graph_->GetIntConstant(number_of_operands, dex_pc);
HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
HNewArray* const object = new (allocator_) HNewArray(cls, length, dex_pc);
AppendInstruction(object);
@@ -1717,8 +1671,8 @@ HNewArray* HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
bool is_reference_array = (primitive == 'L') || (primitive == '[');
DataType::Type type = is_reference_array ? DataType::Type::kReference : DataType::Type::kInt32;
- for (size_t i = 0; i < number_of_vreg_arguments; ++i) {
- HInstruction* value = LoadLocal(is_range ? register_index + i : args[i], type);
+ for (size_t i = 0; i < number_of_operands; ++i) {
+ HInstruction* value = LoadLocal(operands.GetOperand(i), type);
HInstruction* index = graph_->GetIntConstant(i, dex_pc);
HArraySet* aset = new (allocator_) HArraySet(object, index, value, type, dex_pc);
ssa_builder_->MaybeAddAmbiguousArraySet(aset);
@@ -1896,17 +1850,17 @@ bool HInstructionBuilder::LoadClassNeedsAccessCheck(Handle<mirror::Class> klass)
}
}
-void HInstructionBuilder::BuildLoadMethodHandle(uint16_t proto_idx, uint32_t dex_pc) {
+void HInstructionBuilder::BuildLoadMethodHandle(uint16_t method_handle_index, uint32_t dex_pc) {
const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
- HLoadMethodHandle* load_method_handle =
- new (allocator_) HLoadMethodHandle(graph_->GetCurrentMethod(), proto_idx, dex_file, dex_pc);
+ HLoadMethodHandle* load_method_handle = new (allocator_) HLoadMethodHandle(
+ graph_->GetCurrentMethod(), method_handle_index, dex_file, dex_pc);
AppendInstruction(load_method_handle);
}
-void HInstructionBuilder::BuildLoadMethodType(uint16_t proto_idx, uint32_t dex_pc) {
+void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc) {
const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
HLoadMethodType* load_method_type =
- new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_idx, dex_file, dex_pc);
+ new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
AppendInstruction(load_method_type);
}
@@ -2151,11 +2105,10 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
} else {
method_idx = instruction.VRegB_35c();
}
- uint32_t number_of_vreg_arguments = instruction.VRegA_35c();
uint32_t args[5];
- instruction.GetVarArgs(args);
- if (!BuildInvoke(instruction, dex_pc, method_idx,
- number_of_vreg_arguments, false, args, -1)) {
+ uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
+ VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
+ if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
return false;
}
break;
@@ -2178,10 +2131,8 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
} else {
method_idx = instruction.VRegB_3rc();
}
- uint32_t number_of_vreg_arguments = instruction.VRegA_3rc();
- uint32_t register_index = instruction.VRegC();
- if (!BuildInvoke(instruction, dex_pc, method_idx,
- number_of_vreg_arguments, true, nullptr, register_index)) {
+ RangeInstructionOperands operands(instruction.VRegC(), instruction.VRegA_3rc());
+ if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
return false;
}
break;
@@ -2189,33 +2140,18 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
case Instruction::INVOKE_POLYMORPHIC: {
uint16_t method_idx = instruction.VRegB_45cc();
- uint16_t proto_idx = instruction.VRegH_45cc();
- uint32_t number_of_vreg_arguments = instruction.VRegA_45cc();
+ dex::ProtoIndex proto_idx(instruction.VRegH_45cc());
uint32_t args[5];
- instruction.GetVarArgs(args);
- return BuildInvokePolymorphic(instruction,
- dex_pc,
- method_idx,
- proto_idx,
- number_of_vreg_arguments,
- false,
- args,
- -1);
+ uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
+ VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
+ return BuildInvokePolymorphic(instruction, dex_pc, method_idx, proto_idx, operands);
}
case Instruction::INVOKE_POLYMORPHIC_RANGE: {
uint16_t method_idx = instruction.VRegB_4rcc();
- uint16_t proto_idx = instruction.VRegH_4rcc();
- uint32_t number_of_vreg_arguments = instruction.VRegA_4rcc();
- uint32_t register_index = instruction.VRegC_4rcc();
- return BuildInvokePolymorphic(instruction,
- dex_pc,
- method_idx,
- proto_idx,
- number_of_vreg_arguments,
- true,
- nullptr,
- register_index);
+ dex::ProtoIndex proto_idx(instruction.VRegH_4rcc());
+ RangeInstructionOperands operands(instruction.VRegC_4rcc(), instruction.VRegA_4rcc());
+ return BuildInvokePolymorphic(instruction, dex_pc, method_idx, proto_idx, operands);
}
case Instruction::NEG_INT: {
@@ -2763,30 +2699,19 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
}
case Instruction::FILLED_NEW_ARRAY: {
- uint32_t number_of_vreg_arguments = instruction.VRegA_35c();
dex::TypeIndex type_index(instruction.VRegB_35c());
uint32_t args[5];
- instruction.GetVarArgs(args);
- HNewArray* new_array = BuildFilledNewArray(dex_pc,
- type_index,
- number_of_vreg_arguments,
- /* is_range */ false,
- args,
- /* register_index */ 0);
+ uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
+ VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
+ HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
BuildConstructorFenceForAllocation(new_array);
break;
}
case Instruction::FILLED_NEW_ARRAY_RANGE: {
- uint32_t number_of_vreg_arguments = instruction.VRegA_3rc();
dex::TypeIndex type_index(instruction.VRegB_3rc());
- uint32_t register_index = instruction.VRegC_3rc();
- HNewArray* new_array = BuildFilledNewArray(dex_pc,
- type_index,
- number_of_vreg_arguments,
- /* is_range */ true,
- /* args*/ nullptr,
- register_index);
+ RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
+ HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
BuildConstructorFenceForAllocation(new_array);
break;
}
@@ -2949,7 +2874,7 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
}
case Instruction::CONST_METHOD_TYPE: {
- uint16_t proto_idx = instruction.VRegB_21c();
+ dex::ProtoIndex proto_idx(instruction.VRegB_21c());
BuildLoadMethodType(proto_idx, dex_pc);
UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
break;
diff --git a/compiler/optimizing/instruction_builder.h b/compiler/optimizing/instruction_builder.h
index 95ffa6b054..2218a691ea 100644
--- a/compiler/optimizing/instruction_builder.h
+++ b/compiler/optimizing/instruction_builder.h
@@ -38,6 +38,7 @@ class CompilerDriver;
class DexCompilationUnit;
class HBasicBlockBuilder;
class Instruction;
+class InstructionOperands;
class OptimizingCompilerStats;
class ScopedObjectAccess;
class SsaBuilder;
@@ -168,29 +169,20 @@ class HInstructionBuilder : public ValueObject {
bool BuildInvoke(const Instruction& instruction,
uint32_t dex_pc,
uint32_t method_idx,
- uint32_t number_of_vreg_arguments,
- bool is_range,
- uint32_t* args,
- uint32_t register_index);
+ const InstructionOperands& operands);
// Builds an invocation node for invoke-polymorphic and returns whether the
// instruction is supported.
bool BuildInvokePolymorphic(const Instruction& instruction,
uint32_t dex_pc,
uint32_t method_idx,
- uint32_t proto_idx,
- uint32_t number_of_vreg_arguments,
- bool is_range,
- uint32_t* args,
- uint32_t register_index);
+ dex::ProtoIndex proto_idx,
+ const InstructionOperands& operands);
// Builds a new array node and the instructions that fill it.
HNewArray* BuildFilledNewArray(uint32_t dex_pc,
dex::TypeIndex type_index,
- uint32_t number_of_vreg_arguments,
- bool is_range,
- uint32_t* args,
- uint32_t register_index);
+ const InstructionOperands& operands);
void BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc);
@@ -240,11 +232,11 @@ class HInstructionBuilder : public ValueObject {
bool LoadClassNeedsAccessCheck(Handle<mirror::Class> klass)
REQUIRES_SHARED(Locks::mutator_lock_);
- // Builds a `HLoadMethodHandle` loading the given `method_handle_idx`.
+ // Builds a `HLoadMethodHandle` loading the given `method_handle_index`.
void BuildLoadMethodHandle(uint16_t method_handle_idx, uint32_t dex_pc);
- // Builds a `HLoadMethodType` loading the given `proto_idx`.
- void BuildLoadMethodType(uint16_t proto_idx, uint32_t dex_pc);
+ // Builds a `HLoadMethodType` loading the given `proto_index`.
+ void BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc);
// Returns the outer-most compiling method's class.
ObjPtr<mirror::Class> GetOutermostCompilingClass() const;
@@ -260,28 +252,19 @@ class HInstructionBuilder : public ValueObject {
HInvoke* invoke);
bool SetupInvokeArguments(HInvoke* invoke,
- uint32_t number_of_vreg_arguments,
- uint32_t* args,
- uint32_t register_index,
- bool is_range,
+ const InstructionOperands& operands,
const char* descriptor,
size_t start_index,
size_t* argument_index);
bool HandleInvoke(HInvoke* invoke,
- uint32_t number_of_vreg_arguments,
- uint32_t* args,
- uint32_t register_index,
- bool is_range,
+ const InstructionOperands& operands,
const char* descriptor,
HClinitCheck* clinit_check,
bool is_unresolved);
bool HandleStringInit(HInvoke* invoke,
- uint32_t number_of_vreg_arguments,
- uint32_t* args,
- uint32_t register_index,
- bool is_range,
+ const InstructionOperands& operands,
const char* descriptor);
void HandleStringInitResult(HInvokeStaticOrDirect* invoke);
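
The hunks above replace the (number_of_vreg_arguments, args, register_index, is_range) parameter pack with a single InstructionOperands argument. Below is a self-contained sketch of that abstraction, assuming interfaces shaped like the RangeInstructionOperands/VarArgsInstructionOperands constructors and the GetOperand()/GetNumberOfOperands() calls used at the call sites; it is not the libdexfile declaration:

  // Standalone C++ sketch, not the ART sources.
  #include <cassert>
  #include <cstddef>
  #include <cstdint>

  class InstructionOperands {
   public:
    explicit InstructionOperands(size_t num_operands) : num_operands_(num_operands) {}
    virtual ~InstructionOperands() {}
    virtual uint32_t GetOperand(size_t index) const = 0;
    size_t GetNumberOfOperands() const { return num_operands_; }
   private:
    const size_t num_operands_;
  };

  // invoke-*/range, filled-new-array/range: operands are a contiguous register
  // range starting at `first`.
  class RangeInstructionOperands final : public InstructionOperands {
   public:
    RangeInstructionOperands(uint32_t first, size_t num)
        : InstructionOperands(num), first_(first) {}
    uint32_t GetOperand(size_t i) const override {
      assert(i < GetNumberOfOperands());
      return first_ + static_cast<uint32_t>(i);
    }
   private:
    const uint32_t first_;
  };

  // invoke-* / filled-new-array with up to five explicitly listed registers.
  class VarArgsInstructionOperands final : public InstructionOperands {
   public:
    VarArgsInstructionOperands(const uint32_t* regs, size_t num)
        : InstructionOperands(num), regs_(regs) {}
    uint32_t GetOperand(size_t i) const override {
      assert(i < GetNumberOfOperands());
      return regs_[i];
    }
   private:
    const uint32_t* const regs_;
  };

  int main() {
    uint32_t args[3] = {4u, 7u, 9u};
    VarArgsInstructionOperands var_args(args, 3);
    RangeInstructionOperands range(/*first=*/10u, /*num=*/3);
    // Callers such as SetupInvokeArguments() no longer branch on is_range:
    assert(var_args.GetOperand(1) == 7u);
    assert(range.GetOperand(1) == 11u);
    return 0;
  }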
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index f84846d1b0..769a3f1b59 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -94,11 +94,13 @@ class HeapLocation : public ArenaObject<kArenaAllocLSA> {
static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;
HeapLocation(ReferenceInfo* ref_info,
+ DataType::Type type,
size_t offset,
HInstruction* index,
size_t vector_length,
int16_t declaring_class_def_index)
: ref_info_(ref_info),
+ type_(DataType::ToSigned(type)),
offset_(offset),
index_(index),
vector_length_(vector_length),
@@ -116,6 +118,7 @@ class HeapLocation : public ArenaObject<kArenaAllocLSA> {
}
ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
+ DataType::Type GetType() const { return type_; }
size_t GetOffset() const { return offset_; }
HInstruction* GetIndex() const { return index_; }
size_t GetVectorLength() const { return vector_length_; }
@@ -149,6 +152,10 @@ class HeapLocation : public ArenaObject<kArenaAllocLSA> {
private:
// Reference for instance/static field, array element or vector data.
ReferenceInfo* const ref_info_;
+ // Type of data residing at HeapLocation (always signed for integral
+ // data since e.g. a[i] and a[i] & 0xff are represented by differently
+ // signed types; char vs short are disambiguated through the reference).
+ const DataType::Type type_;
// Offset of static/instance field.
// Invalid when this HeapLocation is not field.
const size_t offset_;
@@ -237,19 +244,31 @@ class HeapLocationCollector : public HGraphVisitor {
DCHECK(object != nullptr);
DCHECK(field != nullptr);
return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(object)),
+ field->GetFieldType(),
field->GetFieldOffset().SizeValue(),
nullptr,
HeapLocation::kScalar,
field->GetDeclaringClassDefIndex());
}
- size_t GetArrayHeapLocation(HInstruction* array,
- HInstruction* index,
- size_t vector_length = HeapLocation::kScalar) const {
- DCHECK(array != nullptr);
- DCHECK(index != nullptr);
- DCHECK_GE(vector_length, HeapLocation::kScalar);
+ size_t GetArrayHeapLocation(HInstruction* instruction) const {
+ DCHECK(instruction != nullptr);
+ HInstruction* array = instruction->InputAt(0);
+ HInstruction* index = instruction->InputAt(1);
+ DataType::Type type = instruction->GetType();
+ size_t vector_length = HeapLocation::kScalar;
+ if (instruction->IsArraySet()) {
+ type = instruction->AsArraySet()->GetComponentType();
+ } else if (instruction->IsVecStore() ||
+ instruction->IsVecLoad()) {
+ HVecOperation* vec_op = instruction->AsVecOperation();
+ type = vec_op->GetPackedType();
+ vector_length = vec_op->GetVectorLength();
+ } else {
+ DCHECK(instruction->IsArrayGet());
+ }
return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(array)),
+ type,
HeapLocation::kInvalidFieldOffset,
index,
vector_length,
@@ -279,13 +298,16 @@ class HeapLocationCollector : public HGraphVisitor {
// In later analysis, ComputeMayAlias() and MayAlias() compute and tell whether
// these indexes alias.
size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
+ DataType::Type type,
size_t offset,
HInstruction* index,
size_t vector_length,
int16_t declaring_class_def_index) const {
+ DataType::Type lookup_type = DataType::ToSigned(type);
for (size_t i = 0; i < heap_locations_.size(); i++) {
HeapLocation* loc = heap_locations_[i];
if (loc->GetReferenceInfo() == ref_info &&
+ loc->GetType() == lookup_type &&
loc->GetOffset() == offset &&
loc->GetIndex() == index &&
loc->GetVectorLength() == vector_length &&
@@ -425,6 +447,7 @@ class HeapLocationCollector : public HGraphVisitor {
}
HeapLocation* GetOrCreateHeapLocation(HInstruction* ref,
+ DataType::Type type,
size_t offset,
HInstruction* index,
size_t vector_length,
@@ -432,10 +455,10 @@ class HeapLocationCollector : public HGraphVisitor {
HInstruction* original_ref = HuntForOriginalReference(ref);
ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
size_t heap_location_idx = FindHeapLocationIndex(
- ref_info, offset, index, vector_length, declaring_class_def_index);
+ ref_info, type, offset, index, vector_length, declaring_class_def_index);
if (heap_location_idx == kHeapLocationNotFound) {
HeapLocation* heap_loc = new (GetGraph()->GetAllocator())
- HeapLocation(ref_info, offset, index, vector_length, declaring_class_def_index);
+ HeapLocation(ref_info, type, offset, index, vector_length, declaring_class_def_index);
heap_locations_.push_back(heap_loc);
return heap_loc;
}
@@ -446,17 +469,23 @@ class HeapLocationCollector : public HGraphVisitor {
if (field_info.IsVolatile()) {
has_volatile_ = true;
}
+ DataType::Type type = field_info.GetFieldType();
const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
const size_t offset = field_info.GetFieldOffset().SizeValue();
return GetOrCreateHeapLocation(ref,
+ type,
offset,
nullptr,
HeapLocation::kScalar,
declaring_class_def_index);
}
- void VisitArrayAccess(HInstruction* array, HInstruction* index, size_t vector_length) {
+ void VisitArrayAccess(HInstruction* array,
+ HInstruction* index,
+ DataType::Type type,
+ size_t vector_length) {
GetOrCreateHeapLocation(array,
+ type,
HeapLocation::kInvalidFieldOffset,
index,
vector_length,
@@ -510,28 +539,32 @@ class HeapLocationCollector : public HGraphVisitor {
void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
HInstruction* array = instruction->InputAt(0);
HInstruction* index = instruction->InputAt(1);
- VisitArrayAccess(array, index, HeapLocation::kScalar);
+ DataType::Type type = instruction->GetType();
+ VisitArrayAccess(array, index, type, HeapLocation::kScalar);
CreateReferenceInfoForReferenceType(instruction);
}
void VisitArraySet(HArraySet* instruction) OVERRIDE {
HInstruction* array = instruction->InputAt(0);
HInstruction* index = instruction->InputAt(1);
- VisitArrayAccess(array, index, HeapLocation::kScalar);
+ DataType::Type type = instruction->GetComponentType();
+ VisitArrayAccess(array, index, type, HeapLocation::kScalar);
has_heap_stores_ = true;
}
void VisitVecLoad(HVecLoad* instruction) OVERRIDE {
HInstruction* array = instruction->InputAt(0);
HInstruction* index = instruction->InputAt(1);
- VisitArrayAccess(array, index, instruction->GetVectorLength());
+ DataType::Type type = instruction->GetPackedType();
+ VisitArrayAccess(array, index, type, instruction->GetVectorLength());
CreateReferenceInfoForReferenceType(instruction);
}
void VisitVecStore(HVecStore* instruction) OVERRIDE {
HInstruction* array = instruction->InputAt(0);
HInstruction* index = instruction->InputAt(1);
- VisitArrayAccess(array, index, instruction->GetVectorLength());
+ DataType::Type type = instruction->GetPackedType();
+ VisitArrayAccess(array, index, type, instruction->GetVectorLength());
has_heap_stores_ = true;
}
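
A standalone sketch of the dispatch the new GetArrayHeapLocation(HInstruction*) performs: the access type and vector length come from the instruction itself (GetType() for HArrayGet, GetComponentType() for HArraySet, GetPackedType()/GetVectorLength() for vector ops), and the type is then normalized with ToSigned() before being matched against existing locations. Mock types stand in for the HInstruction hierarchy; this is not the ART collector:

  // Standalone C++ sketch, not the ART sources.
  #include <cassert>
  #include <cstddef>

  enum class Kind { kArrayGet, kArraySet, kVecLoad, kVecStore };
  enum class Type { kInt8, kInt32, kInt64 };

  constexpr size_t kScalar = 1;

  // Minimal stand-in for the fields the collector reads off the access.
  struct ArrayAccess {
    Kind kind;
    Type value_type;       // HArrayGet::GetType() / HArraySet::GetComponentType()
    Type packed_type;      // HVecOperation::GetPackedType()
    size_t vector_length;  // HVecOperation::GetVectorLength()
  };

  // Mirrors the branching added to GetArrayHeapLocation(): scalar accesses use
  // the value/component type, vector accesses use the packed type and length.
  void GetTypeAndLength(const ArrayAccess& a, Type* type, size_t* vector_length) {
    *vector_length = kScalar;
    switch (a.kind) {
      case Kind::kArrayGet:
      case Kind::kArraySet:
        *type = a.value_type;
        break;
      case Kind::kVecLoad:
      case Kind::kVecStore:
        *type = a.packed_type;
        *vector_length = a.vector_length;
        break;
    }
  }

  int main() {
    ArrayAccess get{Kind::kArrayGet, Type::kInt32, Type::kInt32, kScalar};
    ArrayAccess vec{Kind::kVecStore, Type::kInt32, Type::kInt8, 16};
    Type t;
    size_t len;
    GetTypeAndLength(get, &t, &len);
    assert(t == Type::kInt32 && len == kScalar);
    GetTypeAndLength(vec, &t, &len);
    assert(t == Type::kInt8 && len == 16);
    return 0;
  }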
diff --git a/compiler/optimizing/load_store_analysis_test.cc b/compiler/optimizing/load_store_analysis_test.cc
index 56361a8c90..bfe7a4f72f 100644
--- a/compiler/optimizing/load_store_analysis_test.cc
+++ b/compiler/optimizing/load_store_analysis_test.cc
@@ -78,12 +78,16 @@ TEST_F(LoadStoreAnalysisTest, ArrayHeapLocations) {
// Test queries on HeapLocationCollector's ref info and index records.
ReferenceInfo* ref = heap_location_collector.FindReferenceInfoOf(array);
+ DataType::Type type = DataType::Type::kInt32;
size_t field = HeapLocation::kInvalidFieldOffset;
size_t vec = HeapLocation::kScalar;
size_t class_def = HeapLocation::kDeclaringClassDefIndexForArrays;
- size_t loc1 = heap_location_collector.FindHeapLocationIndex(ref, field, c1, vec, class_def);
- size_t loc2 = heap_location_collector.FindHeapLocationIndex(ref, field, c2, vec, class_def);
- size_t loc3 = heap_location_collector.FindHeapLocationIndex(ref, field, index, vec, class_def);
+ size_t loc1 = heap_location_collector.FindHeapLocationIndex(
+ ref, type, field, c1, vec, class_def);
+ size_t loc2 = heap_location_collector.FindHeapLocationIndex(
+ ref, type, field, c2, vec, class_def);
+ size_t loc3 = heap_location_collector.FindHeapLocationIndex(
+ ref, type, field, index, vec, class_def);
// must find this reference info for array in HeapLocationCollector.
ASSERT_TRUE(ref != nullptr);
// must find these heap locations;
@@ -246,28 +250,28 @@ TEST_F(LoadStoreAnalysisTest, ArrayIndexAliasingTest) {
size_t loc2 = HeapLocationCollector::kHeapLocationNotFound;
// Test alias: array[0] and array[1]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set1);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set2);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+0] and array[i-0]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub0);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set3);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set5);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+1] and array[i-1]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add1);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set4);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set6);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+1] and array[1-i]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add1);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, rev_sub1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set4);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set7);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+1] and array[i-(-1)]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add1);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_neg1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set4);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set8);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
}
@@ -409,70 +413,75 @@ TEST_F(LoadStoreAnalysisTest, ArrayAliasingTest) {
size_t loc1, loc2;
// Test alias: array[0] and array[0,1,2,3]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c0, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_0);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
+ // Test alias: array[0] and array[1,2,3,4]
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_1);
+ ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
+
// Test alias: array[0] and array[8,9,10,11]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c8, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_8);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[1] and array[8,9,10,11]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c1);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c8, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_1);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_8);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[1] and array[0,1,2,3]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c1);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c0, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_1);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_0);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[0,1,2,3] and array[8,9,10,11]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c0, 4);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c8, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(vstore_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_8);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[0,1,2,3] and array[1,2,3,4]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c1, 4);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c0, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(vstore_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_1);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[0] and array[i,i+1,i+2,i+3]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, c0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, index, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_i);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i] and array[0,1,2,3]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, index);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, c0, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_0);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i] and array[i,i+1,i+2,i+3]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, index);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, index, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_i);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i] and array[i+8,i+9,i+10,i+11]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, index);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, i_add8, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_i_add8);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+6,i+7,i+8,i+9] and array[i+8,i+9,i+10,i+11]
// Test partial overlap.
- loc1 = heap_location_collector.GetArrayHeapLocation(array, i_add6, 4);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, i_add8, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(vstore_i_add6);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_i_add8);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+6,i+7] and array[i,i+1,i+2,i+3]
// Test different vector lengths.
- loc1 = heap_location_collector.GetArrayHeapLocation(array, i_add6, 2);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, index, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(vstore_i_add6_vlen2);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_i);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+6,i+7] and array[i+8,i+9,i+10,i+11]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, i_add6, 2);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, i_add8, 4);
+ loc1 = heap_location_collector.GetArrayHeapLocation(vstore_i_add6_vlen2);
+ loc2 = heap_location_collector.GetArrayHeapLocation(vstore_i_add8);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
}
@@ -563,33 +572,33 @@ TEST_F(LoadStoreAnalysisTest, ArrayIndexCalculationOverflowTest) {
size_t loc2 = HeapLocationCollector::kHeapLocationNotFound;
// Test alias: array[i+0x80000000] and array[i-0x80000000]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add_0x80000000);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_0x80000000);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_1);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_2);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+0x10] and array[i-0xFFFFFFF0]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add_0x10);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_0xFFFFFFF0);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_3);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_4);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+0x7FFFFFFF] and array[i-0x80000001]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add_0x7FFFFFFF);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_0x80000001);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_5);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_6);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Test alias: array[i+0] and array[i-0]
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add_0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_0);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_7);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_8);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
// Should not alias:
- loc1 = heap_location_collector.GetArrayHeapLocation(array, sub_0x80000000);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_0x80000001);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_2);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_6);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
// Should not alias:
- loc1 = heap_location_collector.GetArrayHeapLocation(array, add_0);
- loc2 = heap_location_collector.GetArrayHeapLocation(array, sub_0x80000000);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_7);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_2);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
}
@@ -647,10 +656,10 @@ TEST_F(LoadStoreAnalysisTest, TestHuntOriginalRef) {
// times the original reference has been transformed by BoundType,
// NullCheck, IntermediateAddress, etc.
ASSERT_EQ(heap_location_collector.GetNumberOfHeapLocations(), 1U);
- size_t loc1 = heap_location_collector.GetArrayHeapLocation(array, c1);
- size_t loc2 = heap_location_collector.GetArrayHeapLocation(bound_type, c1);
- size_t loc3 = heap_location_collector.GetArrayHeapLocation(null_check, c1);
- size_t loc4 = heap_location_collector.GetArrayHeapLocation(inter_addr, c1);
+ size_t loc1 = heap_location_collector.GetArrayHeapLocation(array_get1);
+ size_t loc2 = heap_location_collector.GetArrayHeapLocation(array_get2);
+ size_t loc3 = heap_location_collector.GetArrayHeapLocation(array_get3);
+ size_t loc4 = heap_location_collector.GetArrayHeapLocation(array_get4);
ASSERT_TRUE(loc1 != HeapLocationCollector::kHeapLocationNotFound);
ASSERT_EQ(loc1, loc2);
ASSERT_EQ(loc1, loc3);
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index d598ff592d..35e64f75b9 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -160,7 +160,7 @@ class LSEVisitor : public HGraphDelegateVisitor {
// Scan the list of removed loads to see if we can reuse `type_conversion`, if
// the other removed load has the same substitute and type and is dominated
- // by `type_conversioni`.
+ // by `type_conversion`.
void TryToReuseTypeConversion(HInstruction* type_conversion, size_t index) {
size_t size = removed_loads_.size();
HInstruction* load = removed_loads_[index];
@@ -542,16 +542,7 @@ class LSEVisitor : public HGraphDelegateVisitor {
}
}
- void VisitGetLocation(HInstruction* instruction,
- HInstruction* ref,
- size_t offset,
- HInstruction* index,
- size_t vector_length,
- int16_t declaring_class_def_index) {
- HInstruction* original_ref = heap_location_collector_.HuntForOriginalReference(ref);
- ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
- size_t idx = heap_location_collector_.FindHeapLocationIndex(
- ref_info, offset, index, vector_length, declaring_class_def_index);
+ void VisitGetLocation(HInstruction* instruction, size_t idx) {
DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
ScopedArenaVector<HInstruction*>& heap_values =
heap_values_for_[instruction->GetBlock()->GetBlockId()];
@@ -569,23 +560,7 @@ class LSEVisitor : public HGraphDelegateVisitor {
heap_values[idx] = instruction;
KeepStoresIfAliasedToLocation(heap_values, idx);
} else {
- if (DataType::Kind(heap_value->GetType()) != DataType::Kind(instruction->GetType())) {
- // The only situation where the same heap location has different type is when
- // we do an array get on an instruction that originates from the null constant
- // (the null could be behind a field access, an array access, a null check or
- // a bound type).
- // In order to stay properly typed on primitive types, we do not eliminate
- // the array gets.
- if (kIsDebugBuild) {
- DCHECK(heap_value->IsArrayGet()) << heap_value->DebugName();
- DCHECK(instruction->IsArrayGet()) << instruction->DebugName();
- }
- // Load isn't eliminated. Put the load as the value into the HeapLocation.
- // This acts like GVN but with better aliasing analysis.
- heap_values[idx] = instruction;
- KeepStoresIfAliasedToLocation(heap_values, idx);
- return;
- }
+ // Load is eliminated.
AddRemovedLoad(instruction, heap_value);
TryRemovingNullCheck(instruction);
}
@@ -610,21 +585,11 @@ class LSEVisitor : public HGraphDelegateVisitor {
return false;
}
- void VisitSetLocation(HInstruction* instruction,
- HInstruction* ref,
- size_t offset,
- HInstruction* index,
- size_t vector_length,
- int16_t declaring_class_def_index,
- HInstruction* value) {
+ void VisitSetLocation(HInstruction* instruction, size_t idx, HInstruction* value) {
+ DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
DCHECK(!IsStore(value)) << value->DebugName();
// value may already have a substitute.
value = FindSubstitute(value);
- HInstruction* original_ref = heap_location_collector_.HuntForOriginalReference(ref);
- ReferenceInfo* ref_info = heap_location_collector_.FindReferenceInfoOf(original_ref);
- size_t idx = heap_location_collector_.FindHeapLocationIndex(
- ref_info, offset, index, vector_length, declaring_class_def_index);
- DCHECK_NE(idx, HeapLocationCollector::kHeapLocationNotFound);
ScopedArenaVector<HInstruction*>& heap_values =
heap_values_for_[instruction->GetBlock()->GetBlockId()];
HInstruction* heap_value = heap_values[idx];
@@ -644,7 +609,8 @@ class LSEVisitor : public HGraphDelegateVisitor {
} else if (!loop_info->IsIrreducible()) {
// instruction is a store in the loop so the loop must do write.
DCHECK(side_effects_.GetLoopEffects(loop_info->GetHeader()).DoesAnyWrite());
- if (ref_info->IsSingleton() && !loop_info->IsDefinedOutOfTheLoop(original_ref)) {
+ ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(idx)->GetReferenceInfo();
+ if (ref_info->IsSingleton() && !loop_info->IsDefinedOutOfTheLoop(ref_info->GetReference())) {
// original_ref is created inside the loop. Value stored to it isn't needed at
// the loop header. This is true for outer loops also.
possibly_redundant = true;
@@ -686,79 +652,39 @@ class LSEVisitor : public HGraphDelegateVisitor {
}
void VisitInstanceFieldGet(HInstanceFieldGet* instruction) OVERRIDE {
- HInstruction* obj = instruction->InputAt(0);
- size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
- int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
- VisitGetLocation(instruction,
- obj,
- offset,
- nullptr,
- HeapLocation::kScalar,
- declaring_class_def_index);
+ HInstruction* object = instruction->InputAt(0);
+ const FieldInfo& field = instruction->GetFieldInfo();
+ VisitGetLocation(instruction, heap_location_collector_.GetFieldHeapLocation(object, &field));
}
void VisitInstanceFieldSet(HInstanceFieldSet* instruction) OVERRIDE {
- HInstruction* obj = instruction->InputAt(0);
- size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
- int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
+ HInstruction* object = instruction->InputAt(0);
+ const FieldInfo& field = instruction->GetFieldInfo();
HInstruction* value = instruction->InputAt(1);
- VisitSetLocation(instruction,
- obj,
- offset,
- nullptr,
- HeapLocation::kScalar,
- declaring_class_def_index,
- value);
+ size_t idx = heap_location_collector_.GetFieldHeapLocation(object, &field);
+ VisitSetLocation(instruction, idx, value);
}
void VisitStaticFieldGet(HStaticFieldGet* instruction) OVERRIDE {
HInstruction* cls = instruction->InputAt(0);
- size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
- int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
- VisitGetLocation(instruction,
- cls,
- offset,
- nullptr,
- HeapLocation::kScalar,
- declaring_class_def_index);
+ const FieldInfo& field = instruction->GetFieldInfo();
+ VisitGetLocation(instruction, heap_location_collector_.GetFieldHeapLocation(cls, &field));
}
void VisitStaticFieldSet(HStaticFieldSet* instruction) OVERRIDE {
HInstruction* cls = instruction->InputAt(0);
- size_t offset = instruction->GetFieldInfo().GetFieldOffset().SizeValue();
- int16_t declaring_class_def_index = instruction->GetFieldInfo().GetDeclaringClassDefIndex();
- HInstruction* value = instruction->InputAt(1);
- VisitSetLocation(instruction,
- cls,
- offset,
- nullptr,
- HeapLocation::kScalar,
- declaring_class_def_index,
- value);
+ const FieldInfo& field = instruction->GetFieldInfo();
+ size_t idx = heap_location_collector_.GetFieldHeapLocation(cls, &field);
+ VisitSetLocation(instruction, idx, instruction->InputAt(1));
}
void VisitArrayGet(HArrayGet* instruction) OVERRIDE {
- HInstruction* array = instruction->InputAt(0);
- HInstruction* index = instruction->InputAt(1);
- VisitGetLocation(instruction,
- array,
- HeapLocation::kInvalidFieldOffset,
- index,
- HeapLocation::kScalar,
- HeapLocation::kDeclaringClassDefIndexForArrays);
+ VisitGetLocation(instruction, heap_location_collector_.GetArrayHeapLocation(instruction));
}
void VisitArraySet(HArraySet* instruction) OVERRIDE {
- HInstruction* array = instruction->InputAt(0);
- HInstruction* index = instruction->InputAt(1);
- HInstruction* value = instruction->InputAt(2);
- VisitSetLocation(instruction,
- array,
- HeapLocation::kInvalidFieldOffset,
- index,
- HeapLocation::kScalar,
- HeapLocation::kDeclaringClassDefIndexForArrays,
- value);
+ size_t idx = heap_location_collector_.GetArrayHeapLocation(instruction);
+ VisitSetLocation(instruction, idx, instruction->InputAt(2));
}
void VisitDeoptimize(HDeoptimize* instruction) {
@@ -971,6 +897,7 @@ bool LoadStoreElimination::Run() {
lse_visitor.VisitBasicBlock(block);
}
lse_visitor.RemoveInstructions();
+
return true;
}
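
A trimmed-down illustration of the call-site simplification above: each visitor resolves a heap-location index once (GetFieldHeapLocation/GetArrayHeapLocation in the real pass) and the shared Get/Set handlers work purely on that index. Hypothetical minimal types, not the LSE algorithm itself:

  // Standalone C++ sketch, not the ART sources.
  #include <cassert>
  #include <cstddef>
  #include <vector>

  // Stand-in for the heap value tracked per location (HInstruction* in ART).
  using Value = const int*;

  class Visitor {
   public:
    explicit Visitor(size_t num_locations) : heap_values_(num_locations, nullptr) {}

    // A store records its value under the location index.
    void VisitSetLocation(size_t idx, Value value) { heap_values_[idx] = value; }

    // A load either finds a known value (and could be replaced by it) or
    // becomes the tracked value itself.
    Value VisitGetLocation(size_t idx, Value load) {
      if (heap_values_[idx] == nullptr) {
        heap_values_[idx] = load;   // Nothing known yet; the load stays.
        return load;
      }
      return heap_values_[idx];     // Known value; the load is redundant.
    }

   private:
    std::vector<Value> heap_values_;
  };

  int main() {
    Visitor visitor(/*num_locations=*/1);
    int stored = 42;
    int loaded = 0;
    // Call sites pass only the precomputed location index down.
    visitor.VisitSetLocation(/*idx=*/0, &stored);
    assert(visitor.VisitGetLocation(/*idx=*/0, &loaded) == &stored);
    return 0;
  }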
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index a7c2d0b125..e786502dee 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -6548,7 +6548,7 @@ class HLoadMethodHandle FINAL : public HInstruction {
class HLoadMethodType FINAL : public HInstruction {
public:
HLoadMethodType(HCurrentMethod* current_method,
- uint16_t proto_idx,
+ dex::ProtoIndex proto_index,
const DexFile& dex_file,
uint32_t dex_pc)
: HInstruction(kLoadMethodType,
@@ -6556,7 +6556,7 @@ class HLoadMethodType FINAL : public HInstruction {
SideEffectsForArchRuntimeCalls(),
dex_pc),
special_input_(HUserRecord<HInstruction*>(current_method)),
- proto_idx_(proto_idx),
+ proto_index_(proto_index),
dex_file_(dex_file) {
}
@@ -6568,7 +6568,7 @@ class HLoadMethodType FINAL : public HInstruction {
bool IsClonable() const OVERRIDE { return true; }
- uint16_t GetProtoIndex() const { return proto_idx_; }
+ dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
const DexFile& GetDexFile() const { return dex_file_; }
@@ -6585,7 +6585,7 @@ class HLoadMethodType FINAL : public HInstruction {
// The special input is the HCurrentMethod for kRuntimeCall.
HUserRecord<HInstruction*> special_input_;
- const uint16_t proto_idx_;
+ const dex::ProtoIndex proto_index_;
const DexFile& dex_file_;
};
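
A minimal sketch of why dex::ProtoIndex replaces the raw uint16_t here: a distinct wrapper type keeps proto indexes from being mixed up with other dex indexes while still exposing the raw value where the runtime call needs it (see the .index_ access in the code_generator.cc hunk above). Hypothetical wrapper, not the libdexfile definition:

  // Standalone C++ sketch, not the ART sources.
  #include <cassert>
  #include <cstdint>

  struct TypeIndex  { uint16_t index_; };
  struct ProtoIndex { uint16_t index_; };

  // The signature now self-documents which kind of index it expects; passing a
  // TypeIndex where a ProtoIndex is wanted no longer compiles.
  uint32_t ResolveMethodTypeArg(ProtoIndex proto) { return proto.index_; }

  int main() {
    ProtoIndex proto{7};
    assert(ResolveMethodTypeArg(proto) == 7u);
    // ResolveMethodTypeArg(TypeIndex{7});  // would be a compile-time error
    return 0;
  }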
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
index 9a26f2f6c4..f246228074 100644
--- a/compiler/optimizing/optimizing_compiler_stats.h
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -50,6 +50,7 @@ enum class MethodCompilationStat {
kNotCompiledThrowCatchLoop,
kNotCompiledAmbiguousArrayOp,
kNotCompiledHugeMethod,
+ kNotCompiledIrreducibleAndStringInit,
kNotCompiledLargeMethodNoBranches,
kNotCompiledMalformedOpcode,
kNotCompiledNoCodegen,
diff --git a/compiler/optimizing/scheduler.cc b/compiler/optimizing/scheduler.cc
index e014efaf5c..588ea03d69 100644
--- a/compiler/optimizing/scheduler.cc
+++ b/compiler/optimizing/scheduler.cc
@@ -70,19 +70,19 @@ static bool MayHaveReorderingDependency(SideEffects node, SideEffects other) {
return false;
}
-size_t SchedulingGraph::ArrayAccessHeapLocation(HInstruction* array, HInstruction* index) const {
+size_t SchedulingGraph::ArrayAccessHeapLocation(HInstruction* instruction) const {
DCHECK(heap_location_collector_ != nullptr);
- size_t heap_loc = heap_location_collector_->GetArrayHeapLocation(array, index);
+ size_t heap_loc = heap_location_collector_->GetArrayHeapLocation(instruction);
// This array access should be analyzed and added to HeapLocationCollector before.
DCHECK(heap_loc != HeapLocationCollector::kHeapLocationNotFound);
return heap_loc;
}
-bool SchedulingGraph::ArrayAccessMayAlias(const HInstruction* node,
- const HInstruction* other) const {
+bool SchedulingGraph::ArrayAccessMayAlias(HInstruction* node,
+ HInstruction* other) const {
DCHECK(heap_location_collector_ != nullptr);
- size_t node_heap_loc = ArrayAccessHeapLocation(node->InputAt(0), node->InputAt(1));
- size_t other_heap_loc = ArrayAccessHeapLocation(other->InputAt(0), other->InputAt(1));
+ size_t node_heap_loc = ArrayAccessHeapLocation(node);
+ size_t other_heap_loc = ArrayAccessHeapLocation(other);
// For example: arr[0] and arr[0]
if (node_heap_loc == other_heap_loc) {
@@ -194,8 +194,8 @@ bool SchedulingGraph::FieldAccessMayAlias(const HInstruction* node,
return true;
}
-bool SchedulingGraph::HasMemoryDependency(const HInstruction* node,
- const HInstruction* other) const {
+bool SchedulingGraph::HasMemoryDependency(HInstruction* node,
+ HInstruction* other) const {
if (!MayHaveReorderingDependency(node->GetSideEffects(), other->GetSideEffects())) {
return false;
}
@@ -264,8 +264,8 @@ bool SchedulingGraph::HasExceptionDependency(const HInstruction* node,
// Check whether `node` depends on `other`, taking into account `SideEffect`
// information and `CanThrow` information.
-bool SchedulingGraph::HasSideEffectDependency(const HInstruction* node,
- const HInstruction* other) const {
+bool SchedulingGraph::HasSideEffectDependency(HInstruction* node,
+ HInstruction* other) const {
if (HasMemoryDependency(node, other)) {
return true;
}
diff --git a/compiler/optimizing/scheduler.h b/compiler/optimizing/scheduler.h
index 51cd20aea9..8e98f192d8 100644
--- a/compiler/optimizing/scheduler.h
+++ b/compiler/optimizing/scheduler.h
@@ -310,12 +310,12 @@ class SchedulingGraph : public ValueObject {
void AddOtherDependency(SchedulingNode* node, SchedulingNode* dependency) {
AddDependency(node, dependency, /*is_data_dependency*/false);
}
- bool HasMemoryDependency(const HInstruction* node, const HInstruction* other) const;
+ bool HasMemoryDependency(HInstruction* node, HInstruction* other) const;
bool HasExceptionDependency(const HInstruction* node, const HInstruction* other) const;
- bool HasSideEffectDependency(const HInstruction* node, const HInstruction* other) const;
- bool ArrayAccessMayAlias(const HInstruction* node, const HInstruction* other) const;
+ bool HasSideEffectDependency(HInstruction* node, HInstruction* other) const;
+ bool ArrayAccessMayAlias(HInstruction* node, HInstruction* other) const;
bool FieldAccessMayAlias(const HInstruction* node, const HInstruction* other) const;
- size_t ArrayAccessHeapLocation(HInstruction* array, HInstruction* index) const;
+ size_t ArrayAccessHeapLocation(HInstruction* instruction) const;
size_t FieldAccessHeapLocation(HInstruction* obj, const FieldInfo* field) const;
// Add dependencies nodes for the given `HInstruction`: inputs, environments, and side-effects.
diff --git a/compiler/optimizing/scheduler_test.cc b/compiler/optimizing/scheduler_test.cc
index fb15fc8975..d4cae72c7e 100644
--- a/compiler/optimizing/scheduler_test.cc
+++ b/compiler/optimizing/scheduler_test.cc
@@ -296,38 +296,38 @@ class SchedulerTest : public OptimizingUnitTest {
size_t loc2 = HeapLocationCollector::kHeapLocationNotFound;
// Test side effect dependency: array[0] and array[1]
- loc1 = heap_location_collector.GetArrayHeapLocation(arr, c0);
- loc2 = heap_location_collector.GetArrayHeapLocation(arr, c1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_0);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_1);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
ASSERT_FALSE(scheduling_graph.HasImmediateOtherDependency(arr_set_1, arr_set_0));
// Test side effect dependency based on LSA analysis: array[i] and array[j]
- loc1 = heap_location_collector.GetArrayHeapLocation(arr, i);
- loc2 = heap_location_collector.GetArrayHeapLocation(arr, j);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_j);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
ASSERT_TRUE(scheduling_graph.HasImmediateOtherDependency(arr_set_j, arr_set_i));
// Test side effect dependency based on LSA analysis: array[i] and array[i+0]
- loc1 = heap_location_collector.GetArrayHeapLocation(arr, i);
- loc2 = heap_location_collector.GetArrayHeapLocation(arr, add0);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_add0);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
ASSERT_TRUE(scheduling_graph.HasImmediateOtherDependency(arr_set_add0, arr_set_i));
// Test side effect dependency based on LSA analysis: array[i] and array[i-0]
- loc1 = heap_location_collector.GetArrayHeapLocation(arr, i);
- loc2 = heap_location_collector.GetArrayHeapLocation(arr, sub0);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_sub0);
ASSERT_TRUE(heap_location_collector.MayAlias(loc1, loc2));
ASSERT_TRUE(scheduling_graph.HasImmediateOtherDependency(arr_set_sub0, arr_set_i));
// Test side effect dependency based on LSA analysis: array[i] and array[i+1]
- loc1 = heap_location_collector.GetArrayHeapLocation(arr, i);
- loc2 = heap_location_collector.GetArrayHeapLocation(arr, add1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_i);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_add1);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
ASSERT_FALSE(scheduling_graph.HasImmediateOtherDependency(arr_set_add1, arr_set_i));
// Test side effect dependency based on LSA analysis: array[i+1] and array[i-1]
- loc1 = heap_location_collector.GetArrayHeapLocation(arr, add1);
- loc2 = heap_location_collector.GetArrayHeapLocation(arr, sub1);
+ loc1 = heap_location_collector.GetArrayHeapLocation(arr_set_add1);
+ loc2 = heap_location_collector.GetArrayHeapLocation(arr_set_sub1);
ASSERT_FALSE(heap_location_collector.MayAlias(loc1, loc2));
ASSERT_FALSE(scheduling_graph.HasImmediateOtherDependency(arr_set_sub1, arr_set_add1));
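Read together, these assertions pin down the aliasing verdicts the scheduler depends on, with i and
j unknown at compile time and constant offsets folded by the analysis (a summary of the checks
above, not additional test code):

    // array[0]   vs array[1]    -> cannot alias (distinct constant indices)
    // array[i]   vs array[j]    -> may alias    (both indices unknown)
    // array[i]   vs array[i+0]  -> may alias    (the +0 folds away)
    // array[i]   vs array[i-0]  -> may alias    (the -0 folds away)
    // array[i]   vs array[i+1]  -> cannot alias (same index, non-zero constant delta)
    // array[i+1] vs array[i-1]  -> cannot alias (constant deltas differ)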
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 7010e3f380..aa28c8b500 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -51,15 +51,7 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
if (sp_mask != nullptr) {
stack_mask_max_ = std::max(stack_mask_max_, sp_mask->GetHighestBitSet());
}
- if (inlining_depth > 0) {
- number_of_stack_maps_with_inline_info_++;
- }
- // Note: dex_pc can be kNoDexPc for native method intrinsics.
- if (dex_pc != dex::kDexNoIndex && (dex_pc_max_ == dex::kDexNoIndex || dex_pc_max_ < dex_pc)) {
- dex_pc_max_ = dex_pc;
- }
- register_mask_max_ = std::max(register_mask_max_, register_mask);
current_dex_register_ = 0;
}
@@ -146,51 +138,6 @@ void StackMapStream::EndInlineInfoEntry() {
current_inline_info_ = InlineInfoEntry();
}
-CodeOffset StackMapStream::ComputeMaxNativePcCodeOffset() const {
- CodeOffset max_native_pc_offset;
- for (const StackMapEntry& entry : stack_maps_) {
- max_native_pc_offset = std::max(max_native_pc_offset, entry.native_pc_code_offset);
- }
- return max_native_pc_offset;
-}
-
-size_t StackMapStream::PrepareForFillIn() {
- CodeInfoEncoding encoding;
- encoding.dex_register_map.num_entries = 0; // TODO: Remove this field.
- encoding.dex_register_map.num_bytes = ComputeDexRegisterMapsSize();
- encoding.location_catalog.num_entries = location_catalog_entries_.size();
- encoding.location_catalog.num_bytes = ComputeDexRegisterLocationCatalogSize();
- encoding.inline_info.num_entries = inline_infos_.size();
- // Must be done before calling ComputeInlineInfoEncoding since ComputeInlineInfoEncoding requires
- // dex_method_index_idx to be filled in.
- PrepareMethodIndices();
- ComputeInlineInfoEncoding(&encoding.inline_info.encoding,
- encoding.dex_register_map.num_bytes);
- CodeOffset max_native_pc_offset = ComputeMaxNativePcCodeOffset();
- // Prepare the CodeInfo variable-sized encoding.
- encoding.stack_mask.encoding.num_bits = stack_mask_max_ + 1; // Need room for max element too.
- encoding.stack_mask.num_entries = PrepareStackMasks(encoding.stack_mask.encoding.num_bits);
- encoding.register_mask.encoding.num_bits = MinimumBitsToStore(register_mask_max_);
- encoding.register_mask.num_entries = PrepareRegisterMasks();
- encoding.stack_map.num_entries = stack_maps_.size();
- encoding.stack_map.encoding.SetFromSizes(
- // The stack map contains compressed native PC offsets.
- max_native_pc_offset.CompressedValue(),
- dex_pc_max_,
- encoding.dex_register_map.num_bytes,
- encoding.inline_info.num_entries,
- encoding.register_mask.num_entries,
- encoding.stack_mask.num_entries);
- ComputeInvokeInfoEncoding(&encoding);
- DCHECK_EQ(code_info_encoding_.size(), 0u);
- encoding.Compress(&code_info_encoding_);
- encoding.ComputeTableOffsets();
- // Compute table offsets so we can get the non header size.
- DCHECK_EQ(encoding.HeaderSize(), code_info_encoding_.size());
- needed_size_ = code_info_encoding_.size() + encoding.NonHeaderSize();
- return needed_size_;
-}
-
size_t StackMapStream::ComputeDexRegisterLocationCatalogSize() const {
size_t size = DexRegisterLocationCatalog::kFixedSize;
for (const DexRegisterLocation& dex_register_location : location_catalog_entries_) {
@@ -204,6 +151,10 @@ size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) con
if (num_dex_registers == 0u) {
return 0u; // No register map will be emitted.
}
+ size_t number_of_live_dex_registers = live_dex_registers_mask->NumSetBits();
+  if (number_of_live_dex_registers == 0u) {
+ return 0u; // No register map will be emitted.
+ }
DCHECK(live_dex_registers_mask != nullptr);
// Size of the map in bytes.
@@ -211,7 +162,6 @@ size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) con
// Add the live bit mask for the Dex register liveness.
size += DexRegisterMap::GetLiveBitMaskSize(num_dex_registers);
// Compute the size of the set of live Dex register entries.
- size_t number_of_live_dex_registers = live_dex_registers_mask->NumSetBits();
size_t map_entries_size_in_bits =
DexRegisterMap::SingleEntrySizeInBits(catalog_size) * number_of_live_dex_registers;
size_t map_entries_size_in_bytes =
@@ -220,86 +170,6 @@ size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) con
return size;
}
-size_t StackMapStream::ComputeDexRegisterMapsSize() const {
- size_t size = 0;
- for (const DexRegisterMapEntry& entry : dex_register_entries_) {
- size += entry.ComputeSize(location_catalog_entries_.size());
- }
- return size;
-}
-
-void StackMapStream::ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding) {
- DCHECK(encoding != nullptr);
- uint32_t native_pc_max = 0;
- uint16_t method_index_max = 0;
- size_t invoke_infos_count = 0;
- size_t invoke_type_max = 0;
- for (const StackMapEntry& entry : stack_maps_) {
- if (entry.dex_method_index != dex::kDexNoIndex) {
- native_pc_max = std::max(native_pc_max, entry.native_pc_code_offset.CompressedValue());
- method_index_max = std::max(method_index_max, static_cast<uint16_t>(entry.dex_method_index));
- invoke_type_max = std::max(invoke_type_max, static_cast<size_t>(entry.invoke_type));
- ++invoke_infos_count;
- }
- }
- encoding->invoke_info.num_entries = invoke_infos_count;
- encoding->invoke_info.encoding.SetFromSizes(native_pc_max, invoke_type_max, method_index_max);
-}
-
-void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
- size_t dex_register_maps_bytes) {
- uint32_t method_index_max = 0;
- uint32_t dex_pc_max = dex::kDexNoIndex;
- uint32_t extra_data_max = 0;
-
- uint32_t inline_info_index = 0;
- for (const StackMapEntry& entry : stack_maps_) {
- for (size_t j = 0; j < entry.inlining_depth; ++j) {
- InlineInfoEntry inline_entry = inline_infos_[inline_info_index++];
- if (inline_entry.method == nullptr) {
- method_index_max = std::max(method_index_max, inline_entry.dex_method_index_idx);
- extra_data_max = std::max(extra_data_max, 1u);
- } else {
- method_index_max = std::max(
- method_index_max, High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
- extra_data_max = std::max(
- extra_data_max, Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
- }
- if (inline_entry.dex_pc != dex::kDexNoIndex &&
- (dex_pc_max == dex::kDexNoIndex || dex_pc_max < inline_entry.dex_pc)) {
- dex_pc_max = inline_entry.dex_pc;
- }
- }
- }
- DCHECK_EQ(inline_info_index, inline_infos_.size());
-
- encoding->SetFromSizes(method_index_max, dex_pc_max, extra_data_max, dex_register_maps_bytes);
-}
-
-size_t StackMapStream::MaybeCopyDexRegisterMap(DexRegisterMapEntry& entry,
- size_t* current_offset,
- MemoryRegion dex_register_locations_region) {
- DCHECK(current_offset != nullptr);
- if ((entry.num_dex_registers == 0) || (entry.live_dex_registers_mask->NumSetBits() == 0)) {
- // No dex register map needed.
- return StackMap::kNoDexRegisterMap;
- }
- if (entry.offset == DexRegisterMapEntry::kOffsetUnassigned) {
-    // Not already copied, need to copy and assign an offset.
- entry.offset = *current_offset;
- const size_t entry_size = entry.ComputeSize(location_catalog_entries_.size());
- DexRegisterMap dex_register_map(
- dex_register_locations_region.Subregion(entry.offset, entry_size));
- *current_offset += entry_size;
- // Fill in the map since it was just added.
- FillInDexRegisterMap(dex_register_map,
- entry.num_dex_registers,
- *entry.live_dex_registers_mask,
- entry.locations_start_index);
- }
- return entry.offset;
-}
-
void StackMapStream::FillInMethodInfo(MemoryRegion region) {
{
MethodInfo info(region.begin(), method_indices_.size());
@@ -318,30 +188,64 @@ void StackMapStream::FillInMethodInfo(MemoryRegion region) {
}
}
-void StackMapStream::FillInCodeInfo(MemoryRegion region) {
- DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
- DCHECK_NE(0u, needed_size_) << "PrepareForFillIn not called before FillIn";
+template<typename Vector>
+static MemoryRegion EncodeMemoryRegion(Vector* out, size_t* bit_offset, uint32_t bit_length) {
+ uint32_t byte_length = BitsToBytesRoundUp(bit_length);
+ EncodeVarintBits(out, bit_offset, byte_length);
+ *bit_offset = RoundUp(*bit_offset, kBitsPerByte);
+ out->resize(out->size() + byte_length);
+ MemoryRegion region(out->data() + *bit_offset / kBitsPerByte, byte_length);
+ *bit_offset += kBitsPerByte * byte_length;
+ return region;
+}
- DCHECK_EQ(region.size(), needed_size_);
+template<uint32_t NumColumns>
+using ScopedBitTableBuilder = BitTableBuilder<NumColumns, ScopedArenaAllocatorAdapter<uint32_t>>;
- // Note that the memory region does not have to be zeroed when we JIT code
- // because we do not use the arena allocator there.
+size_t StackMapStream::PrepareForFillIn() {
+ size_t bit_offset = 0;
+ out_.clear();
- // Write the CodeInfo header.
- region.CopyFrom(0, MemoryRegion(code_info_encoding_.data(), code_info_encoding_.size()));
+ // Decide the offsets of dex register map entries, but do not write them out yet.
+ // Needs to be done first as it modifies the stack map entry.
+ size_t dex_register_map_bytes = 0;
+ for (DexRegisterMapEntry& entry : dex_register_entries_) {
+ size_t size = entry.ComputeSize(location_catalog_entries_.size());
+ entry.offset = size == 0 ? DexRegisterMapEntry::kOffsetUnassigned : dex_register_map_bytes;
+ dex_register_map_bytes += size;
+ }
- CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- DCHECK_EQ(encoding.stack_map.num_entries, stack_maps_.size());
+  // Must be done before emitting the invoke info and inline info tables below, since they need
+  // dex_method_index_idx to be filled in.
+ PrepareMethodIndices();
- MemoryRegion dex_register_locations_region = region.Subregion(
- encoding.dex_register_map.byte_offset,
- encoding.dex_register_map.num_bytes);
+ // Dedup stack masks. Needs to be done first as it modifies the stack map entry.
+ size_t stack_mask_bits = stack_mask_max_ + 1; // Need room for max element too.
+ size_t num_stack_masks = PrepareStackMasks(stack_mask_bits);
+
+ // Dedup register masks. Needs to be done first as it modifies the stack map entry.
+ size_t num_register_masks = PrepareRegisterMasks();
+
+ // Write dex register maps.
+ MemoryRegion dex_register_map_region =
+ EncodeMemoryRegion(&out_, &bit_offset, dex_register_map_bytes * kBitsPerByte);
+ for (DexRegisterMapEntry& entry : dex_register_entries_) {
+ size_t entry_size = entry.ComputeSize(location_catalog_entries_.size());
+ if (entry_size != 0) {
+ DexRegisterMap dex_register_map(
+ dex_register_map_region.Subregion(entry.offset, entry_size));
+ FillInDexRegisterMap(dex_register_map,
+ entry.num_dex_registers,
+ *entry.live_dex_registers_mask,
+ entry.locations_start_index);
+ }
+ }
- // Set the Dex register location catalog.
- MemoryRegion dex_register_location_catalog_region = region.Subregion(
- encoding.location_catalog.byte_offset,
- encoding.location_catalog.num_bytes);
+ // Write dex register catalog.
+ EncodeVarintBits(&out_, &bit_offset, location_catalog_entries_.size());
+ size_t location_catalog_bytes = ComputeDexRegisterLocationCatalogSize();
+ MemoryRegion dex_register_location_catalog_region =
+ EncodeMemoryRegion(&out_, &bit_offset, location_catalog_bytes * kBitsPerByte);
DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
// Offset in `dex_register_location_catalog` where to store the next
// register location.
@@ -353,93 +257,87 @@ void StackMapStream::FillInCodeInfo(MemoryRegion region) {
// Ensure we reached the end of the Dex registers location_catalog.
DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());
- ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false, kArenaAllocStackMapStream);
- uintptr_t next_dex_register_map_offset = 0;
- uintptr_t next_inline_info_index = 0;
- size_t invoke_info_idx = 0;
- for (size_t i = 0, e = stack_maps_.size(); i < e; ++i) {
- StackMap stack_map = code_info.GetStackMapAt(i, encoding);
- StackMapEntry entry = stack_maps_[i];
-
- stack_map.SetDexPc(encoding.stack_map.encoding, entry.dex_pc);
- stack_map.SetNativePcCodeOffset(encoding.stack_map.encoding, entry.native_pc_code_offset);
- stack_map.SetRegisterMaskIndex(encoding.stack_map.encoding, entry.register_mask_index);
- stack_map.SetStackMaskIndex(encoding.stack_map.encoding, entry.stack_mask_index);
-
- size_t offset = MaybeCopyDexRegisterMap(dex_register_entries_[entry.dex_register_map_index],
- &next_dex_register_map_offset,
- dex_register_locations_region);
- stack_map.SetDexRegisterMapOffset(encoding.stack_map.encoding, offset);
-
+ // Write stack maps.
+ ScopedArenaAllocatorAdapter<void> adapter = allocator_->Adapter(kArenaAllocStackMapStream);
+ ScopedBitTableBuilder<StackMap::Field::kCount> stack_map_builder((adapter));
+ ScopedBitTableBuilder<InvokeInfo::Field::kCount> invoke_info_builder((adapter));
+ ScopedBitTableBuilder<InlineInfo::Field::kCount> inline_info_builder((adapter));
+ for (const StackMapEntry& entry : stack_maps_) {
if (entry.dex_method_index != dex::kDexNoIndex) {
- InvokeInfo invoke_info(code_info.GetInvokeInfo(encoding, invoke_info_idx));
- invoke_info.SetNativePcCodeOffset(encoding.invoke_info.encoding, entry.native_pc_code_offset);
- invoke_info.SetInvokeType(encoding.invoke_info.encoding, entry.invoke_type);
- invoke_info.SetMethodIndexIdx(encoding.invoke_info.encoding, entry.dex_method_index_idx);
- ++invoke_info_idx;
+ invoke_info_builder.AddRow(
+ entry.native_pc_code_offset.CompressedValue(),
+ entry.invoke_type,
+ entry.dex_method_index_idx);
}
// Set the inlining info.
- if (entry.inlining_depth != 0) {
- InlineInfo inline_info = code_info.GetInlineInfo(next_inline_info_index, encoding);
-
- // Fill in the index.
- stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, next_inline_info_index);
- DCHECK_EQ(next_inline_info_index, entry.inline_infos_start_index);
- next_inline_info_index += entry.inlining_depth;
-
- inline_info.SetDepth(encoding.inline_info.encoding, entry.inlining_depth);
- DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());
-
- for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
- InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
- if (inline_entry.method != nullptr) {
- inline_info.SetMethodIndexIdxAtDepth(
- encoding.inline_info.encoding,
- depth,
- High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
- inline_info.SetExtraDataAtDepth(
- encoding.inline_info.encoding,
- depth,
- Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
- } else {
- inline_info.SetMethodIndexIdxAtDepth(encoding.inline_info.encoding,
- depth,
- inline_entry.dex_method_index_idx);
- inline_info.SetExtraDataAtDepth(encoding.inline_info.encoding, depth, 1);
- }
- inline_info.SetDexPcAtDepth(encoding.inline_info.encoding, depth, inline_entry.dex_pc);
- size_t dex_register_map_offset = MaybeCopyDexRegisterMap(
- dex_register_entries_[inline_entry.dex_register_map_index],
- &next_dex_register_map_offset,
- dex_register_locations_region);
- inline_info.SetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding,
- depth,
- dex_register_map_offset);
+ uint32_t inline_info_index = StackMap::kNoValue;
+ DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());
+ for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
+ InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
+ uint32_t method_index_idx = inline_entry.dex_method_index_idx;
+ uint32_t extra_data = 1;
+ if (inline_entry.method != nullptr) {
+ method_index_idx = High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method));
+ extra_data = Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method));
+ }
+ uint32_t index = inline_info_builder.AddRow(
+ (depth == entry.inlining_depth - 1) ? InlineInfo::kLast : InlineInfo::kMore,
+ method_index_idx,
+ inline_entry.dex_pc,
+ extra_data,
+ dex_register_entries_[inline_entry.dex_register_map_index].offset);
+ if (depth == 0) {
+ inline_info_index = index;
}
- } else if (encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
- stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, StackMap::kNoInlineInfo);
}
+ stack_map_builder.AddRow(
+ entry.native_pc_code_offset.CompressedValue(),
+ entry.dex_pc,
+ dex_register_entries_[entry.dex_register_map_index].offset,
+ inline_info_index,
+ entry.register_mask_index,
+ entry.stack_mask_index);
+ }
+ stack_map_builder.Encode(&out_, &bit_offset);
+ invoke_info_builder.Encode(&out_, &bit_offset);
+ inline_info_builder.Encode(&out_, &bit_offset);
+
+ // Write register masks table.
+ ScopedBitTableBuilder<1> register_mask_builder((adapter));
+ for (size_t i = 0; i < num_register_masks; ++i) {
+ register_mask_builder.AddRow(register_masks_[i]);
}
+ register_mask_builder.Encode(&out_, &bit_offset);
// Write stack masks table.
- const size_t stack_mask_bits = encoding.stack_mask.encoding.BitSize();
+ EncodeVarintBits(&out_, &bit_offset, stack_mask_bits);
+ out_.resize(BitsToBytesRoundUp(bit_offset + stack_mask_bits * num_stack_masks));
+ BitMemoryRegion stack_mask_region(MemoryRegion(out_.data(), out_.size()),
+ bit_offset,
+ stack_mask_bits * num_stack_masks);
if (stack_mask_bits > 0) {
- size_t stack_mask_bytes = RoundUp(stack_mask_bits, kBitsPerByte) / kBitsPerByte;
- for (size_t i = 0; i < encoding.stack_mask.num_entries; ++i) {
- MemoryRegion source(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes);
- BitMemoryRegion stack_mask = code_info.GetStackMask(i, encoding);
- for (size_t bit_index = 0; bit_index < stack_mask_bits; ++bit_index) {
- stack_mask.StoreBit(bit_index, source.LoadBit(bit_index));
+ for (size_t i = 0; i < num_stack_masks; ++i) {
+ size_t stack_mask_bytes = BitsToBytesRoundUp(stack_mask_bits);
+ BitMemoryRegion src(MemoryRegion(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes));
+ BitMemoryRegion dst = stack_mask_region.Subregion(i * stack_mask_bits, stack_mask_bits);
+ for (size_t bit_index = 0; bit_index < stack_mask_bits; bit_index += BitSizeOf<uint32_t>()) {
+ size_t num_bits = std::min<size_t>(stack_mask_bits - bit_index, BitSizeOf<uint32_t>());
+ dst.StoreBits(bit_index, src.LoadBits(bit_index, num_bits), num_bits);
}
}
}
- // Write register masks table.
- for (size_t i = 0; i < encoding.register_mask.num_entries; ++i) {
- BitMemoryRegion register_mask = code_info.GetRegisterMask(i, encoding);
- register_mask.StoreBits(0, register_masks_[i], encoding.register_mask.encoding.BitSize());
- }
+ return UnsignedLeb128Size(out_.size()) + out_.size();
+}
+
+void StackMapStream::FillInCodeInfo(MemoryRegion region) {
+ DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
+ DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn";
+ DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) + out_.size());
+
+ uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size());
+ region.CopyFromVector(ptr - region.begin(), out_);
// Verify all written data in debug build.
if (kIsDebugBuild) {
@@ -527,7 +425,6 @@ void StackMapStream::CheckDexRegisterMap(const CodeInfo& code_info,
size_t num_dex_registers,
BitVector* live_dex_registers_mask,
size_t dex_register_locations_index) const {
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
for (size_t reg = 0; reg < num_dex_registers; reg++) {
// Find the location we tried to encode.
DexRegisterLocation expected = DexRegisterLocation::None();
@@ -542,7 +439,7 @@ void StackMapStream::CheckDexRegisterMap(const CodeInfo& code_info,
} else {
DCHECK(dex_register_map.IsDexRegisterLive(reg));
DexRegisterLocation seen = dex_register_map.GetDexRegisterLocation(
- reg, num_dex_registers, code_info, encoding);
+ reg, num_dex_registers, code_info);
DCHECK_EQ(expected.GetKind(), seen.GetKind());
DCHECK_EQ(expected.GetValue(), seen.GetValue());
}
@@ -600,8 +497,9 @@ size_t StackMapStream::PrepareStackMasks(size_t entry_size_in_bits) {
for (StackMapEntry& stack_map : stack_maps_) {
size_t index = dedup.size();
MemoryRegion stack_mask(stack_masks_.data() + index * byte_entry_size, byte_entry_size);
+ BitMemoryRegion stack_mask_bits(stack_mask);
for (size_t i = 0; i < entry_size_in_bits; i++) {
- stack_mask.StoreBit(i, stack_map.sp_mask != nullptr && stack_map.sp_mask->IsBitSet(i));
+ stack_mask_bits.StoreBit(i, stack_map.sp_mask != nullptr && stack_map.sp_mask->IsBitSet(i));
}
stack_map.stack_mask_index = dedup.emplace(stack_mask, index).first->second;
}
@@ -611,23 +509,23 @@ size_t StackMapStream::PrepareStackMasks(size_t entry_size_in_bits) {
// Check that all StackMapStream inputs are correctly encoded by trying to read them back.
void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- DCHECK_EQ(code_info.GetNumberOfStackMaps(encoding), stack_maps_.size());
+ DCHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
+ DCHECK_EQ(code_info.GetNumberOfStackMaskBits(), static_cast<uint32_t>(stack_mask_max_ + 1));
+ DCHECK_EQ(code_info.GetNumberOfLocationCatalogEntries(), location_catalog_entries_.size());
size_t invoke_info_index = 0;
for (size_t s = 0; s < stack_maps_.size(); ++s) {
- const StackMap stack_map = code_info.GetStackMapAt(s, encoding);
- const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
+ const StackMap stack_map = code_info.GetStackMapAt(s);
StackMapEntry entry = stack_maps_[s];
// Check main stack map fields.
- DCHECK_EQ(stack_map.GetNativePcOffset(stack_map_encoding, instruction_set_),
+ DCHECK_EQ(stack_map.GetNativePcOffset(instruction_set_),
entry.native_pc_code_offset.Uint32Value(instruction_set_));
- DCHECK_EQ(stack_map.GetDexPc(stack_map_encoding), entry.dex_pc);
- DCHECK_EQ(stack_map.GetRegisterMaskIndex(stack_map_encoding), entry.register_mask_index);
- DCHECK_EQ(code_info.GetRegisterMaskOf(encoding, stack_map), entry.register_mask);
- const size_t num_stack_mask_bits = code_info.GetNumberOfStackMaskBits(encoding);
- DCHECK_EQ(stack_map.GetStackMaskIndex(stack_map_encoding), entry.stack_mask_index);
- BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
+ DCHECK_EQ(stack_map.GetDexPc(), entry.dex_pc);
+ DCHECK_EQ(stack_map.GetRegisterMaskIndex(), entry.register_mask_index);
+ DCHECK_EQ(code_info.GetRegisterMaskOf(stack_map), entry.register_mask);
+ const size_t num_stack_mask_bits = code_info.GetNumberOfStackMaskBits();
+ DCHECK_EQ(stack_map.GetStackMaskIndex(), entry.stack_mask_index);
+ BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
if (entry.sp_mask != nullptr) {
DCHECK_GE(stack_mask.size_in_bits(), entry.sp_mask->GetNumberOfBits());
for (size_t b = 0; b < num_stack_mask_bits; b++) {
@@ -639,38 +537,36 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
}
}
if (entry.dex_method_index != dex::kDexNoIndex) {
- InvokeInfo invoke_info = code_info.GetInvokeInfo(encoding, invoke_info_index);
- DCHECK_EQ(invoke_info.GetNativePcOffset(encoding.invoke_info.encoding, instruction_set_),
+ InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index);
+ DCHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
entry.native_pc_code_offset.Uint32Value(instruction_set_));
- DCHECK_EQ(invoke_info.GetInvokeType(encoding.invoke_info.encoding), entry.invoke_type);
- DCHECK_EQ(invoke_info.GetMethodIndexIdx(encoding.invoke_info.encoding),
- entry.dex_method_index_idx);
+ DCHECK_EQ(invoke_info.GetInvokeType(), entry.invoke_type);
+ DCHECK_EQ(invoke_info.GetMethodIndexIdx(), entry.dex_method_index_idx);
invoke_info_index++;
}
CheckDexRegisterMap(code_info,
code_info.GetDexRegisterMapOf(
- stack_map, encoding, entry.dex_register_entry.num_dex_registers),
+ stack_map, entry.dex_register_entry.num_dex_registers),
entry.dex_register_entry.num_dex_registers,
entry.dex_register_entry.live_dex_registers_mask,
entry.dex_register_entry.locations_start_index);
// Check inline info.
- DCHECK_EQ(stack_map.HasInlineInfo(stack_map_encoding), (entry.inlining_depth != 0));
+ DCHECK_EQ(stack_map.HasInlineInfo(), (entry.inlining_depth != 0));
if (entry.inlining_depth != 0) {
- InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
- DCHECK_EQ(inline_info.GetDepth(encoding.inline_info.encoding), entry.inlining_depth);
+ InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+ DCHECK_EQ(inline_info.GetDepth(), entry.inlining_depth);
for (size_t d = 0; d < entry.inlining_depth; ++d) {
size_t inline_info_index = entry.inline_infos_start_index + d;
DCHECK_LT(inline_info_index, inline_infos_.size());
InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
- DCHECK_EQ(inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, d),
- inline_entry.dex_pc);
- if (inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, d)) {
- DCHECK_EQ(inline_info.GetArtMethodAtDepth(encoding.inline_info.encoding, d),
+ DCHECK_EQ(inline_info.GetDexPcAtDepth(d), inline_entry.dex_pc);
+ if (inline_info.EncodesArtMethodAtDepth(d)) {
+ DCHECK_EQ(inline_info.GetArtMethodAtDepth(d),
inline_entry.method);
} else {
const size_t method_index_idx =
- inline_info.GetMethodIndexIdxAtDepth(encoding.inline_info.encoding, d);
+ inline_info.GetMethodIndexIdxAtDepth(d);
DCHECK_EQ(method_index_idx, inline_entry.dex_method_index_idx);
DCHECK_EQ(method_indices_[method_index_idx], inline_entry.method_index);
}
@@ -679,7 +575,6 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
code_info.GetDexRegisterMapAtDepth(
d,
inline_info,
- encoding,
inline_entry.dex_register_entry.num_dex_registers),
inline_entry.dex_register_entry.num_dex_registers,
inline_entry.dex_register_entry.live_dex_registers_mask,
@@ -690,7 +585,7 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
}
size_t StackMapStream::ComputeMethodInfoSize() const {
- DCHECK_NE(0u, needed_size_) << "PrepareForFillIn not called before " << __FUNCTION__;
+ DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__;
return MethodInfo::ComputeSize(method_indices_.size());
}
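With the per-table encodings gone, PrepareForFillIn() now serializes everything into out_ up front,
and FillInCodeInfo() only prepends the payload length as ULEB128 before copying the bytes. A minimal
sketch of that length prefix, using a plain std::vector instead of the arena-backed buffer and a
hypothetical AppendUleb128 standing in for art's EncodeUnsignedLeb128:

    #include <cstdint>
    #include <vector>

    // Emit a ULEB128 length prefix, as FillInCodeInfo does before copying out_.
    static void AppendUleb128(std::vector<uint8_t>* out, uint32_t value) {
      do {
        uint8_t byte = value & 0x7f;   // low seven bits of the remaining value
        value >>= 7;
        if (value != 0) {
          byte |= 0x80;                // continuation bit: more groups follow
        }
        out->push_back(byte);
      } while (value != 0);
    }

    // For a 300-byte payload this writes 0xAC 0x02 (two bytes), so the whole CodeInfo
    // region is 2 + 300 = 302 bytes -- i.e. UnsignedLeb128Size(out_.size()) + out_.size().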
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 268e9bd6e0..ea97cf6530 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -73,36 +73,32 @@ class StackMapStream : public ValueObject {
method_indices_(allocator->Adapter(kArenaAllocStackMapStream)),
dex_register_entries_(allocator->Adapter(kArenaAllocStackMapStream)),
stack_mask_max_(-1),
- dex_pc_max_(kNoDexPc),
- register_mask_max_(0),
- number_of_stack_maps_with_inline_info_(0),
+ out_(allocator->Adapter(kArenaAllocStackMapStream)),
dex_map_hash_to_stack_map_indices_(std::less<uint32_t>(),
allocator->Adapter(kArenaAllocStackMapStream)),
current_entry_(),
current_inline_info_(),
- code_info_encoding_(allocator->Adapter(kArenaAllocStackMapStream)),
- needed_size_(0),
current_dex_register_(0),
in_inline_frame_(false) {
stack_maps_.reserve(10);
+ out_.reserve(64);
location_catalog_entries_.reserve(4);
dex_register_locations_.reserve(10 * 4);
inline_infos_.reserve(2);
- code_info_encoding_.reserve(16);
}
// A dex register map entry for a single stack map entry, contains what registers are live as
// well as indices into the location catalog.
class DexRegisterMapEntry {
public:
- static const size_t kOffsetUnassigned = -1;
+ static const uint32_t kOffsetUnassigned = -1;
BitVector* live_dex_registers_mask;
uint32_t num_dex_registers;
size_t locations_start_index;
// Computed fields
size_t hash = 0;
- size_t offset = kOffsetUnassigned;
+ uint32_t offset = kOffsetUnassigned;
size_t ComputeSize(size_t catalog_size) const;
};
@@ -113,7 +109,7 @@ class StackMapStream : public ValueObject {
CodeOffset native_pc_code_offset;
uint32_t register_mask;
BitVector* sp_mask;
- uint8_t inlining_depth;
+ uint32_t inlining_depth;
size_t inline_infos_start_index;
uint32_t stack_mask_index;
uint32_t register_mask_index;
@@ -174,11 +170,6 @@ class StackMapStream : public ValueObject {
private:
size_t ComputeDexRegisterLocationCatalogSize() const;
- size_t ComputeDexRegisterMapsSize() const;
- void ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
- size_t dex_register_maps_bytes);
-
- CodeOffset ComputeMaxNativePcCodeOffset() const;
// Returns the number of unique stack masks.
size_t PrepareStackMasks(size_t entry_size_in_bits);
@@ -197,24 +188,11 @@ class StackMapStream : public ValueObject {
bool DexRegisterMapEntryEquals(const DexRegisterMapEntry& a, const DexRegisterMapEntry& b) const;
// Fill in the corresponding entries of a register map.
- void ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding);
-
- // Returns the index of an entry with the same dex register map as the current_entry,
- // or kNoSameDexMapFound if no such entry exists.
- size_t FindEntryWithTheSameDexMap();
- bool HaveTheSameDexMaps(const StackMapEntry& a, const StackMapEntry& b) const;
-
- // Fill in the corresponding entries of a register map.
void FillInDexRegisterMap(DexRegisterMap dex_register_map,
uint32_t num_dex_registers,
const BitVector& live_dex_registers_mask,
uint32_t start_index_in_dex_register_locations) const;
- // Returns the offset for the dex register inside of the dex register location region. See FillIn.
- // Only copies the dex register map if the offset for the entry is not already assigned.
- size_t MaybeCopyDexRegisterMap(DexRegisterMapEntry& entry,
- size_t* current_offset,
- MemoryRegion dex_register_locations_region);
void CheckDexRegisterMap(const CodeInfo& code_info,
const DexRegisterMap& dex_register_map,
size_t num_dex_registers,
@@ -244,21 +222,16 @@ class StackMapStream : public ValueObject {
ScopedArenaVector<uint32_t> method_indices_;
ScopedArenaVector<DexRegisterMapEntry> dex_register_entries_;
int stack_mask_max_;
- uint32_t dex_pc_max_;
- uint32_t register_mask_max_;
- size_t number_of_stack_maps_with_inline_info_;
+
+ ScopedArenaVector<uint8_t> out_;
ScopedArenaSafeMap<uint32_t, ScopedArenaVector<uint32_t>> dex_map_hash_to_stack_map_indices_;
StackMapEntry current_entry_;
InlineInfoEntry current_inline_info_;
- ScopedArenaVector<uint8_t> code_info_encoding_;
- size_t needed_size_;
uint32_t current_dex_register_;
bool in_inline_frame_;
- static constexpr uint32_t kNoSameDexMapFound = -1;
-
DISALLOW_COPY_AND_ASSIGN(StackMapStream);
};
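With the cached maxima and code_info_encoding_ removed, the only state a caller needs from the
stream is the size returned by PrepareForFillIn(). A sketch of the calling sequence as exercised by
the tests below, with a plain std::vector standing in for the arena-allocated backing storage
(illustrative, error handling omitted):

    // Given a StackMapStream `stream` already populated via Begin/EndStackMapEntry:
    size_t size = stream.PrepareForFillIn();          // packs all tables into out_
    std::vector<uint8_t> storage(size);
    MemoryRegion region(storage.data(), storage.size());
    stream.FillInCodeInfo(region);                    // ULEB128 header + copy of out_
    CodeInfo code_info(region);                       // ready for the queries below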
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index e36c592662..9db7588b3a 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -29,14 +29,13 @@ namespace art {
// to the given bit vector. Returns true if they are same.
static bool CheckStackMask(
const CodeInfo& code_info,
- const CodeInfoEncoding& encoding,
const StackMap& stack_map,
const BitVector& bit_vector) {
- BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
- if (bit_vector.GetNumberOfBits() > encoding.stack_mask.encoding.BitSize()) {
+ BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
+ if (bit_vector.GetNumberOfBits() > code_info.GetNumberOfStackMaskBits()) {
return false;
}
- for (size_t i = 0; i < encoding.stack_mask.encoding.BitSize(); ++i) {
+ for (size_t i = 0; i < code_info.GetNumberOfStackMaskBits(); ++i) {
if (stack_mask.LoadBit(i) != bit_vector.IsBitSet(i)) {
return false;
}
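The rest of the test changes are one mechanical rewrite: the CodeInfoEncoding that used to be
threaded through every query is gone, and each accessor reads what it needs from the CodeInfo
directly. Schematically (both forms taken from the hunks below):

    // Before
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapAt(0, encoding);
    ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));

    // After
    StackMap stack_map = code_info.GetStackMapAt(0);
    ASSERT_EQ(0u, stack_map.GetDexPc());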
@@ -65,30 +64,29 @@ TEST(StackMapTest, Test1) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(1u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries(encoding);
+ uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(2u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding);
+ DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
// The Dex register location catalog contains:
// - one 1-byte short Dex register location, and
// - one 5-byte large Dex register location.
size_t expected_location_catalog_size = 1u + 5u;
ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
- StackMap stack_map = code_info.GetStackMapAt(0, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_EQ(0u, stack_map.GetDexPc());
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask));
+ ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_register_map =
- code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -99,16 +97,16 @@ TEST(StackMapTest, Test1) {
ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(
- 0, number_of_dex_registers, code_info, encoding));
- ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
+ ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info));
size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
0, number_of_dex_registers, number_of_catalog_entries);
@@ -125,7 +123,7 @@ TEST(StackMapTest, Test1) {
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo());
}
TEST(StackMapTest, Test2) {
@@ -179,12 +177,11 @@ TEST(StackMapTest, Test2) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(4u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(4u, code_info.GetNumberOfStackMaps());
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries(encoding);
+ uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(7u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding);
+ DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
// The Dex register location catalog contains:
// - six 1-byte short Dex register locations, and
// - one 5-byte large Dex register location.
@@ -193,18 +190,18 @@ TEST(StackMapTest, Test2) {
// First stack map.
{
- StackMap stack_map = code_info.GetStackMapAt(0, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_EQ(0u, stack_map.GetDexPc());
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask1));
+ ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_register_map =
- code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -215,16 +212,16 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(
- 0, number_of_dex_registers, code_info, encoding));
- ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
+ ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info));
size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
0, number_of_dex_registers, number_of_catalog_entries);
@@ -241,29 +238,29 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
- ASSERT_TRUE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
- InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
- ASSERT_EQ(2u, inline_info.GetDepth(encoding.inline_info.encoding));
- ASSERT_EQ(3u, inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_EQ(2u, inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
- ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
+ ASSERT_TRUE(stack_map.HasInlineInfo());
+ InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+ ASSERT_EQ(2u, inline_info.GetDepth());
+ ASSERT_EQ(3u, inline_info.GetDexPcAtDepth(0));
+ ASSERT_EQ(2u, inline_info.GetDexPcAtDepth(1));
+ ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(0));
+ ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(1));
}
// Second stack map.
{
- StackMap stack_map = code_info.GetStackMapAt(1, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u, encoding)));
- ASSERT_EQ(1u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(128u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(1);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u)));
+ ASSERT_EQ(1u, stack_map.GetDexPc());
+ ASSERT_EQ(128u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask2));
+ ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask2));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_register_map =
- code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -274,17 +271,17 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(18, dex_register_map.GetMachineRegister(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(3, dex_register_map.GetMachineRegister(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
0, number_of_dex_registers, number_of_catalog_entries);
@@ -301,23 +298,23 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(18, location0.GetValue());
ASSERT_EQ(3, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo());
}
// Third stack map.
{
- StackMap stack_map = code_info.GetStackMapAt(2, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u, encoding)));
- ASSERT_EQ(2u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(192u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(2);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u)));
+ ASSERT_EQ(2u, stack_map.GetDexPc());
+ ASSERT_EQ(192u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask3));
+ ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask3));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_register_map =
- code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -328,17 +325,17 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map.GetLocationKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map.GetLocationInternalKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(6, dex_register_map.GetMachineRegister(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(8, dex_register_map.GetMachineRegister(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
0, number_of_dex_registers, number_of_catalog_entries);
@@ -355,23 +352,23 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(6, location0.GetValue());
ASSERT_EQ(8, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo());
}
// Fourth stack map.
{
- StackMap stack_map = code_info.GetStackMapAt(3, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u, encoding)));
- ASSERT_EQ(3u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(256u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(3);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u)));
+ ASSERT_EQ(3u, stack_map.GetDexPc());
+ ASSERT_EQ(256u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask4));
+ ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask4));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_register_map =
- code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -382,17 +379,17 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map.GetLocationKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map.GetLocationInternalKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(3, dex_register_map.GetMachineRegister(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(1, dex_register_map.GetMachineRegister(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
0, number_of_dex_registers, number_of_catalog_entries);
@@ -409,7 +406,7 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(3, location0.GetValue());
ASSERT_EQ(1, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo());
}
}
@@ -440,12 +437,11 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(1u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries(encoding);
+ uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(2u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding);
+ DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
// The Dex register location catalog contains:
   // - one 1-byte short Dex register location, and
// - one 5-byte large Dex register location.
@@ -454,17 +450,17 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
// First stack map.
{
- StackMap stack_map = code_info.GetStackMapAt(0, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_EQ(0u, stack_map.GetDexPc());
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(CheckStackMask(code_info, encoding, stack_map, sp_mask1));
+ ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
- DexRegisterMap map(code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
+ DexRegisterMap map(code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers));
ASSERT_TRUE(map.IsDexRegisterLive(0));
ASSERT_TRUE(map.IsDexRegisterLive(1));
ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -474,15 +470,15 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
size_t expected_map_size = 1u + 1u;
ASSERT_EQ(expected_map_size, map.Size());
- ASSERT_EQ(Kind::kInStack, map.GetLocationKind(0, number_of_dex_registers, code_info, encoding));
+ ASSERT_EQ(Kind::kInStack, map.GetLocationKind(0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstant,
- map.GetLocationKind(1, number_of_dex_registers, code_info, encoding));
+ map.GetLocationKind(1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kInStack,
- map.GetLocationInternalKind(0, number_of_dex_registers, code_info, encoding));
+ map.GetLocationInternalKind(0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstantLargeValue,
- map.GetLocationInternalKind(1, number_of_dex_registers, code_info, encoding));
- ASSERT_EQ(0, map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info, encoding));
- ASSERT_EQ(-2, map.GetConstant(1, number_of_dex_registers, code_info, encoding));
+ map.GetLocationInternalKind(1, number_of_dex_registers, code_info));
+ ASSERT_EQ(0, map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info));
+ ASSERT_EQ(-2, map.GetConstant(1, number_of_dex_registers, code_info));
const size_t index0 =
map.GetLocationCatalogEntryIndex(0, number_of_dex_registers, number_of_catalog_entries);
@@ -501,10 +497,10 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
// Test that the inline info dex register map deduplicated to the same offset as the stack map
// one.
- ASSERT_TRUE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
- InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
- EXPECT_EQ(inline_info.GetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding, 0),
- stack_map.GetDexRegisterMapOffset(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasInlineInfo());
+ InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+ EXPECT_EQ(inline_info.GetDexRegisterMapOffsetAtDepth(0),
+ stack_map.GetDexRegisterMapOffset());
}
}
@@ -527,27 +523,26 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(1u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries(encoding);
+ uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(1u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding);
+ DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
// The Dex register location catalog contains:
// - one 5-byte large Dex register location.
size_t expected_location_catalog_size = 5u;
ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
- StackMap stack_map = code_info.GetStackMapAt(0, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_EQ(0u, stack_map.GetDexPc());
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_register_map =
- code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
@@ -558,14 +553,14 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(
- 1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationInternalKind(
- 0, number_of_dex_registers, code_info, encoding));
+ 0, number_of_dex_registers, code_info));
ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(
- 1, number_of_dex_registers, code_info, encoding));
- ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info, encoding));
+ 1, number_of_dex_registers, code_info));
+ ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info));
size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
0, number_of_dex_registers, number_of_catalog_entries);
@@ -582,7 +577,7 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ ASSERT_FALSE(stack_map.HasInlineInfo());
}
// Generate a stack map whose dex register offset is
@@ -620,11 +615,10 @@ TEST(StackMapTest, DexRegisterMapOffsetOverflow) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
// The location catalog contains two entries (DexRegisterLocation(kConstant, 0)
// and DexRegisterLocation(kConstant, 1)), therefore the location catalog index
// has a size of 1 bit.
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries(encoding);
+ uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(2u, number_of_catalog_entries);
ASSERT_EQ(1u, DexRegisterMap::SingleEntrySizeInBits(number_of_catalog_entries));
@@ -635,21 +629,21 @@ TEST(StackMapTest, DexRegisterMapOffsetOverflow) {
// locations (that is, 127 bytes of data).
// Hence it has a size of 255 bytes, and therefore...
ASSERT_EQ(128u, DexRegisterMap::GetLiveBitMaskSize(number_of_dex_registers));
- StackMap stack_map0 = code_info.GetStackMapAt(0, encoding);
+ StackMap stack_map0 = code_info.GetStackMapAt(0);
DexRegisterMap dex_register_map0 =
- code_info.GetDexRegisterMapOf(stack_map0, encoding, number_of_dex_registers);
+ code_info.GetDexRegisterMapOf(stack_map0, number_of_dex_registers);
ASSERT_EQ(127u, dex_register_map0.GetLocationMappingDataSize(number_of_dex_registers,
number_of_catalog_entries));
ASSERT_EQ(255u, dex_register_map0.Size());
- StackMap stack_map1 = code_info.GetStackMapAt(1, encoding);
- ASSERT_TRUE(stack_map1.HasDexRegisterMap(encoding.stack_map.encoding));
+ StackMap stack_map1 = code_info.GetStackMapAt(1);
+ ASSERT_TRUE(stack_map1.HasDexRegisterMap());
// ...the offset of the second Dex register map (relative to the
// beginning of the Dex register maps region) is 255 (i.e.,
// kNoDexRegisterMapSmallEncoding).
- ASSERT_NE(stack_map1.GetDexRegisterMapOffset(encoding.stack_map.encoding),
- StackMap::kNoDexRegisterMap);
- ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(encoding.stack_map.encoding), 0xFFu);
+ ASSERT_NE(stack_map1.GetDexRegisterMapOffset(),
+ StackMap::kNoValue);
+ ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(), 0xFFu);
}
TEST(StackMapTest, TestShareDexRegisterMap) {
@@ -682,33 +676,32 @@ TEST(StackMapTest, TestShareDexRegisterMap) {
stream.FillInCodeInfo(region);
CodeInfo ci(region);
- CodeInfoEncoding encoding = ci.ExtractEncoding();
// Verify first stack map.
- StackMap sm0 = ci.GetStackMapAt(0, encoding);
- DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, encoding, number_of_dex_registers);
- ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci, encoding));
- ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci, encoding));
+ StackMap sm0 = ci.GetStackMapAt(0);
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, number_of_dex_registers);
+ ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci));
+ ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci));
// Verify second stack map.
- StackMap sm1 = ci.GetStackMapAt(1, encoding);
- DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, encoding, number_of_dex_registers);
- ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci, encoding));
- ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci, encoding));
+ StackMap sm1 = ci.GetStackMapAt(1);
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, number_of_dex_registers);
+ ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci));
+ ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci));
// Verify third stack map.
- StackMap sm2 = ci.GetStackMapAt(2, encoding);
- DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, encoding, number_of_dex_registers);
- ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci, encoding));
- ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci, encoding));
+ StackMap sm2 = ci.GetStackMapAt(2);
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, number_of_dex_registers);
+ ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci));
+ ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci));
// Verify dex register map offsets.
- ASSERT_EQ(sm0.GetDexRegisterMapOffset(encoding.stack_map.encoding),
- sm1.GetDexRegisterMapOffset(encoding.stack_map.encoding));
- ASSERT_NE(sm0.GetDexRegisterMapOffset(encoding.stack_map.encoding),
- sm2.GetDexRegisterMapOffset(encoding.stack_map.encoding));
- ASSERT_NE(sm1.GetDexRegisterMapOffset(encoding.stack_map.encoding),
- sm2.GetDexRegisterMapOffset(encoding.stack_map.encoding));
+ ASSERT_EQ(sm0.GetDexRegisterMapOffset(),
+ sm1.GetDexRegisterMapOffset());
+ ASSERT_NE(sm0.GetDexRegisterMapOffset(),
+ sm2.GetDexRegisterMapOffset());
+ ASSERT_NE(sm1.GetDexRegisterMapOffset(),
+ sm2.GetDexRegisterMapOffset());
}
TEST(StackMapTest, TestNoDexRegisterMap) {
@@ -732,33 +725,32 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(2u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries(encoding);
+ uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(0u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog(encoding);
+ DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
ASSERT_EQ(0u, location_catalog.Size());
- StackMap stack_map = code_info.GetStackMapAt(0, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64, encoding)));
- ASSERT_EQ(0u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(encoding, stack_map));
-
- ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
-
- stack_map = code_info.GetStackMapAt(1, encoding);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1, encoding)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68, encoding)));
- ASSERT_EQ(1u, stack_map.GetDexPc(encoding.stack_map.encoding));
- ASSERT_EQ(68u, stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA));
- ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(encoding, stack_map));
-
- ASSERT_FALSE(stack_map.HasDexRegisterMap(encoding.stack_map.encoding));
- ASSERT_FALSE(stack_map.HasInlineInfo(encoding.stack_map.encoding));
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_EQ(0u, stack_map.GetDexPc());
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
+
+ ASSERT_FALSE(stack_map.HasDexRegisterMap());
+ ASSERT_FALSE(stack_map.HasInlineInfo());
+
+ stack_map = code_info.GetStackMapAt(1);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68)));
+ ASSERT_EQ(1u, stack_map.GetDexPc());
+ ASSERT_EQ(68u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(stack_map));
+
+ ASSERT_FALSE(stack_map.HasDexRegisterMap());
+ ASSERT_FALSE(stack_map.HasInlineInfo());
}
TEST(StackMapTest, InlineTest) {
@@ -835,100 +827,99 @@ TEST(StackMapTest, InlineTest) {
stream.FillInCodeInfo(region);
CodeInfo ci(region);
- CodeInfoEncoding encoding = ci.ExtractEncoding();
{
// Verify first stack map.
- StackMap sm0 = ci.GetStackMapAt(0, encoding);
-
- DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, encoding, 2);
- ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding));
- ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding));
-
- InlineInfo if0 = ci.GetInlineInfoOf(sm0, encoding);
- ASSERT_EQ(2u, if0.GetDepth(encoding.inline_info.encoding));
- ASSERT_EQ(2u, if0.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_EQ(3u, if0.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
- ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
-
- DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, encoding, 1);
- ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding));
-
- DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, encoding, 3);
- ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci, encoding));
- ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci, encoding));
- ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci, encoding));
+ StackMap sm0 = ci.GetStackMapAt(0);
+
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, 2);
+ ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci));
+ ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci));
+
+ InlineInfo if0 = ci.GetInlineInfoOf(sm0);
+ ASSERT_EQ(2u, if0.GetDepth());
+ ASSERT_EQ(2u, if0.GetDexPcAtDepth(0));
+ ASSERT_TRUE(if0.EncodesArtMethodAtDepth(0));
+ ASSERT_EQ(3u, if0.GetDexPcAtDepth(1));
+ ASSERT_TRUE(if0.EncodesArtMethodAtDepth(1));
+
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, 1);
+ ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci));
+
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, 3);
+ ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci));
+ ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci));
+ ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci));
}
{
// Verify second stack map.
- StackMap sm1 = ci.GetStackMapAt(1, encoding);
-
- DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, encoding, 2);
- ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding));
- ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding));
-
- InlineInfo if1 = ci.GetInlineInfoOf(sm1, encoding);
- ASSERT_EQ(3u, if1.GetDepth(encoding.inline_info.encoding));
- ASSERT_EQ(2u, if1.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_EQ(3u, if1.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
- ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
- ASSERT_EQ(5u, if1.GetDexPcAtDepth(encoding.inline_info.encoding, 2));
- ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 2));
-
- DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, encoding, 1);
- ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding));
-
- DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, encoding, 3);
- ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci, encoding));
- ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci, encoding));
- ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci, encoding));
-
- ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, 2));
+ StackMap sm1 = ci.GetStackMapAt(1);
+
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, 2);
+ ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci));
+ ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci));
+
+ InlineInfo if1 = ci.GetInlineInfoOf(sm1);
+ ASSERT_EQ(3u, if1.GetDepth());
+ ASSERT_EQ(2u, if1.GetDexPcAtDepth(0));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(0));
+ ASSERT_EQ(3u, if1.GetDexPcAtDepth(1));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(1));
+ ASSERT_EQ(5u, if1.GetDexPcAtDepth(2));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(2));
+
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, 1);
+ ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci));
+
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, 3);
+ ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci));
+ ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci));
+ ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci));
+
+ ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(2));
}
{
// Verify third stack map.
- StackMap sm2 = ci.GetStackMapAt(2, encoding);
+ StackMap sm2 = ci.GetStackMapAt(2);
- DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, encoding, 2);
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, 2);
ASSERT_FALSE(dex_registers0.IsDexRegisterLive(0));
- ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci, encoding));
- ASSERT_FALSE(sm2.HasInlineInfo(encoding.stack_map.encoding));
+ ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci));
+ ASSERT_FALSE(sm2.HasInlineInfo());
}
{
// Verify fourth stack map.
- StackMap sm3 = ci.GetStackMapAt(3, encoding);
+ StackMap sm3 = ci.GetStackMapAt(3);
- DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, encoding, 2);
- ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci, encoding));
- ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci, encoding));
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, 2);
+ ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci));
+ ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci));
- InlineInfo if2 = ci.GetInlineInfoOf(sm3, encoding);
- ASSERT_EQ(3u, if2.GetDepth(encoding.inline_info.encoding));
- ASSERT_EQ(2u, if2.GetDexPcAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 0));
- ASSERT_EQ(5u, if2.GetDexPcAtDepth(encoding.inline_info.encoding, 1));
- ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 1));
- ASSERT_EQ(10u, if2.GetDexPcAtDepth(encoding.inline_info.encoding, 2));
- ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info.encoding, 2));
+ InlineInfo if2 = ci.GetInlineInfoOf(sm3);
+ ASSERT_EQ(3u, if2.GetDepth());
+ ASSERT_EQ(2u, if2.GetDexPcAtDepth(0));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(0));
+ ASSERT_EQ(5u, if2.GetDexPcAtDepth(1));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(1));
+ ASSERT_EQ(10u, if2.GetDexPcAtDepth(2));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(2));
- ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, 0));
+ ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(0));
- DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, encoding, 1);
- ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci, encoding));
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, 1);
+ ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci));
- DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, encoding, 2);
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, 2);
ASSERT_FALSE(dex_registers2.IsDexRegisterLive(0));
- ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci, encoding));
+ ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci));
}
}
TEST(StackMapTest, CodeOffsetTest) {
- // Test minimum alignments, encoding, and decoding.
+  // Test minimum alignments and decoding.
CodeOffset offset_thumb2 =
CodeOffset::FromOffset(kThumb2InstructionAlignment, InstructionSet::kThumb2);
CodeOffset offset_arm64 =
@@ -969,13 +960,12 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
stream.FillInCodeInfo(region);
CodeInfo code_info(region);
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(2u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
- StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4, encoding);
- StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8, encoding);
- EXPECT_EQ(stack_map1.GetStackMaskIndex(encoding.stack_map.encoding),
- stack_map2.GetStackMaskIndex(encoding.stack_map.encoding));
+ StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4);
+ StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8);
+ EXPECT_EQ(stack_map1.GetStackMaskIndex(),
+ stack_map2.GetStackMaskIndex());
}
TEST(StackMapTest, TestInvokeInfo) {
@@ -1007,26 +997,25 @@ TEST(StackMapTest, TestInvokeInfo) {
CodeInfo code_info(code_info_region);
MethodInfo method_info(method_info_region.begin());
- CodeInfoEncoding encoding = code_info.ExtractEncoding();
- ASSERT_EQ(3u, code_info.GetNumberOfStackMaps(encoding));
+ ASSERT_EQ(3u, code_info.GetNumberOfStackMaps());
- InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4, encoding));
- InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8, encoding));
- InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16, encoding));
- InvokeInfo invoke_invalid(code_info.GetInvokeInfoForNativePcOffset(12, encoding));
+ InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4));
+ InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8));
+ InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16));
+ InvokeInfo invoke_invalid(code_info.GetInvokeInfoForNativePcOffset(12));
EXPECT_FALSE(invoke_invalid.IsValid()); // No entry for that index.
EXPECT_TRUE(invoke1.IsValid());
EXPECT_TRUE(invoke2.IsValid());
EXPECT_TRUE(invoke3.IsValid());
- EXPECT_EQ(invoke1.GetInvokeType(encoding.invoke_info.encoding), kSuper);
- EXPECT_EQ(invoke1.GetMethodIndex(encoding.invoke_info.encoding, method_info), 1u);
- EXPECT_EQ(invoke1.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA), 4u);
- EXPECT_EQ(invoke2.GetInvokeType(encoding.invoke_info.encoding), kStatic);
- EXPECT_EQ(invoke2.GetMethodIndex(encoding.invoke_info.encoding, method_info), 3u);
- EXPECT_EQ(invoke2.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA), 8u);
- EXPECT_EQ(invoke3.GetInvokeType(encoding.invoke_info.encoding), kDirect);
- EXPECT_EQ(invoke3.GetMethodIndex(encoding.invoke_info.encoding, method_info), 65535u);
- EXPECT_EQ(invoke3.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA), 16u);
+ EXPECT_EQ(invoke1.GetInvokeType(), kSuper);
+ EXPECT_EQ(invoke1.GetMethodIndex(method_info), 1u);
+ EXPECT_EQ(invoke1.GetNativePcOffset(kRuntimeISA), 4u);
+ EXPECT_EQ(invoke2.GetInvokeType(), kStatic);
+ EXPECT_EQ(invoke2.GetMethodIndex(method_info), 3u);
+ EXPECT_EQ(invoke2.GetNativePcOffset(kRuntimeISA), 8u);
+ EXPECT_EQ(invoke3.GetInvokeType(), kDirect);
+ EXPECT_EQ(invoke3.GetMethodIndex(method_info), 65535u);
+ EXPECT_EQ(invoke3.GetNativePcOffset(kRuntimeISA), 16u);
}
} // namespace art
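
Taken together, the test updates above show the shape of the API change: the CodeInfoEncoding side table is no longer extracted and threaded through every query, so the accessors on CodeInfo, StackMap, DexRegisterMap, InlineInfo, and InvokeInfo drop their encoding arguments. The following before/after sketch uses only calls that appear on the removed and added lines of these tests; the helper names, the const qualifiers, and the exact parameter/return types are illustrative assumptions, not part of the change itself.

    #include "stack_map.h"  // Assumed include; provides CodeInfo, StackMap, CodeInfoEncoding.

    // Pre-change lookup, as on the removed lines: callers extract an encoding
    // and pass the relevant sub-encoding to every accessor.
    static uint32_t DexPcForNativePcOld(const CodeInfo& code_info, uint32_t native_pc) {
      CodeInfoEncoding encoding = code_info.ExtractEncoding();
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc, encoding);
      return stack_map.GetDexPc(encoding.stack_map.encoding);
    }

    // Post-change lookup, as on the added lines: the encoding is internal to
    // CodeInfo, so only the data being queried is passed around.
    static uint32_t DexPcForNativePcNew(const CodeInfo& code_info, uint32_t native_pc) {
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc);
      return stack_map.GetDexPc();
    }

The same pattern applies to the other accessors exercised here, e.g. GetDexRegisterMapOf(stack_map, number_of_dex_registers) and GetInlineInfoOf(stack_map) now take the stack map alone, which is why each test hunk shrinks by the width of one encoding argument per call.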