Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/bounds_check_elimination.cc         |  1
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc             |  4
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc          |  4
-rw-r--r--  compiler/optimizing/code_generator_mips.cc              |  4
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc            |  4
-rw-r--r--  compiler/optimizing/code_generator_x86.cc               |  4
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc            |  4
-rw-r--r--  compiler/optimizing/inliner.cc                          |  2
-rw-r--r--  compiler/optimizing/load_store_elimination.cc           |  5
-rw-r--r--  compiler/optimizing/optimizing_cfi_test.cc              |  1
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc              |  8
-rw-r--r--  compiler/optimizing/prepare_for_register_allocation.cc  |  3
-rw-r--r--  compiler/optimizing/stack_map_stream.cc                 | 19
-rw-r--r--  compiler/optimizing/stack_map_stream.h                  |  2
14 files changed, 37 insertions, 28 deletions
diff --git a/compiler/optimizing/bounds_check_elimination.cc b/compiler/optimizing/bounds_check_elimination.cc
index 6b832da2bb..a170734ff2 100644
--- a/compiler/optimizing/bounds_check_elimination.cc
+++ b/compiler/optimizing/bounds_check_elimination.cc
@@ -596,6 +596,7 @@ class BCEVisitor : public HGraphVisitor {
 
   // Helper method to assign a new range to an instruction in given basic block.
   void AssignRange(HBasicBlock* basic_block, HInstruction* instruction, ValueRange* range) {
+    DCHECK(!range->IsMonotonicValueRange() || instruction->IsLoopHeaderPhi());
     GetValueRangeMap(basic_block)->Overwrite(instruction->GetId(), range);
   }
 
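
The DCHECK added above encodes a BCE invariant: a MonotonicValueRange describes an induction variable, so only a loop header phi should ever carry one. Below is a minimal, self-contained sketch of that invariant; the types are simplified stand-ins for ART's ValueRange/HInstruction hierarchy, not the real classes.

    #include <cassert>

    // Simplified stand-ins for ART's classes; names mirror the real API,
    // but these definitions are hypothetical.
    struct ValueRange {
      virtual ~ValueRange() {}
      virtual bool IsMonotonicValueRange() const { return false; }
    };

    struct MonotonicValueRange : public ValueRange {
      bool IsMonotonicValueRange() const override { return true; }
    };

    struct HInstruction {
      explicit HInstruction(bool is_loop_header_phi)
          : is_loop_header_phi_(is_loop_header_phi) {}
      bool IsLoopHeaderPhi() const { return is_loop_header_phi_; }
      bool is_loop_header_phi_;
    };

    // A monotonic range models an induction variable, so it may only be
    // attached to a loop header phi; this is the check the new DCHECK makes.
    void AssignRange(HInstruction* instruction, ValueRange* range) {
      assert(!range->IsMonotonicValueRange() || instruction->IsLoopHeaderPhi());
      // ... record the range for this instruction ...
    }
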
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 19e5d067a1..c61ef0a0bc 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -4641,7 +4641,7 @@ vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeMethodPatch(
     MethodReference target_method,
     vixl::aarch64::Label* adrp_label) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             adrp_label,
                             &pc_relative_method_patches_);
 }
@@ -4650,7 +4650,7 @@ vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
     MethodReference target_method,
     vixl::aarch64::Label* adrp_label) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             adrp_label,
                             &method_bss_entry_patches_);
 }
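
This hunk and the matching ones in the other code generators below are mechanical fallout from a single rename: the method-index member of MethodReference is now called index rather than dex_method_index. A rough sketch of the struct's post-rename shape; the real definition lives in ART's method_reference.h and may carry additional helpers, so treat the members here as assumed from the call sites in this commit.

    #include <cstdint>

    class DexFile;  // opaque here; the real class lives elsewhere in ART

    // Sketch only; member set assumed from the call sites in this commit.
    struct MethodReference {
      MethodReference(const DexFile* file, uint32_t method_index)
          : dex_file(file), index(method_index) {}
      const DexFile* dex_file;
      uint32_t index;  // was: dex_method_index
    };

Each call site simply swaps target_method.dex_method_index for target_method.index; no behavior changes.
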
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 8b9495d564..6147259bd3 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -9119,14 +9119,14 @@ void CodeGeneratorARMVIXL::GenerateVirtualCall(
 
 CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativeMethodPatch(
     MethodReference target_method) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             &pc_relative_method_patches_);
 }
 
 CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewMethodBssEntryPatch(
     MethodReference target_method) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             &method_bss_entry_patches_);
 }
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index f0ef0071b6..9db2bd35ca 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -1683,7 +1683,7 @@ CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPa
     MethodReference target_method,
     const PcRelativePatchInfo* info_high) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             info_high,
                             &pc_relative_method_patches_);
 }
@@ -1692,7 +1692,7 @@ CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatc
     MethodReference target_method,
     const PcRelativePatchInfo* info_high) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             info_high,
                             &method_bss_entry_patches_);
 }
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 201b1b065f..a27cbce3db 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -1592,7 +1592,7 @@ CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMeth
     MethodReference target_method,
     const PcRelativePatchInfo* info_high) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             info_high,
                             &pc_relative_method_patches_);
 }
@@ -1601,7 +1601,7 @@ CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntry
     MethodReference target_method,
     const PcRelativePatchInfo* info_high) {
   return NewPcRelativePatch(*target_method.dex_file,
-                            target_method.dex_method_index,
+                            target_method.index,
                             info_high,
                             &method_bss_entry_patches_);
 }
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index e45ad0a9a3..c153cf78da 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -4623,7 +4623,7 @@ void CodeGeneratorX86::RecordBootMethodPatch(HInvokeStaticOrDirect* invoke) {
       invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
   boot_image_method_patches_.emplace_back(address,
                                           *invoke->GetTargetMethod().dex_file,
-                                          invoke->GetTargetMethod().dex_method_index);
+                                          invoke->GetTargetMethod().index);
   __ Bind(&boot_image_method_patches_.back().label);
 }
 
@@ -4633,7 +4633,7 @@ Label* CodeGeneratorX86::NewMethodBssEntryPatch(
   // Add the patch entry and bind its label at the end of the instruction.
   method_bss_entry_patches_.emplace_back(method_address,
                                          *target_method.dex_file,
-                                         target_method.dex_method_index);
+                                         target_method.index);
   return &method_bss_entry_patches_.back().label;
 }
 
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 8c4374d71e..bbf05a70d6 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1068,13 +1068,13 @@ void CodeGeneratorX86_64::GenerateVirtualCall(
 
 void CodeGeneratorX86_64::RecordBootMethodPatch(HInvokeStaticOrDirect* invoke) {
   boot_image_method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
-                                          invoke->GetTargetMethod().dex_method_index);
+                                          invoke->GetTargetMethod().index);
   __ Bind(&boot_image_method_patches_.back().label);
 }
 
 Label* CodeGeneratorX86_64::NewMethodBssEntryPatch(MethodReference target_method) {
   // Add a patch entry and return the label.
-  method_bss_entry_patches_.emplace_back(*target_method.dex_file, target_method.dex_method_index);
+  method_bss_entry_patches_.emplace_back(*target_method.dex_file, target_method.index);
   return &method_bss_entry_patches_.back().label;
 }
 
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 6567a3a445..793e781bae 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -1236,7 +1236,7 @@ bool HInliner::TryInlineAndReplace(HInvoke* invoke_instruction,
       const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
       uint32_t dex_method_index = FindMethodIndexIn(
           method, caller_dex_file, invoke_instruction->GetDexMethodIndex());
-      if (dex_method_index == DexFile::kDexNoIndex) {
+      if (dex_method_index == dex::kDexNoIndex) {
         return false;
       }
       HInvokeVirtual* new_invoke = new (graph_->GetArena()) HInvokeVirtual(
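
The other recurring change in this commit replaces DexFile::kDexNoIndex with dex::kDexNoIndex: the "no index" sentinel moves from a DexFile class member into the dex namespace, which is why dex_file_types.h gets included in the files below. A sketch of the call-site pattern; the sentinel's value is assumed here to be the usual all-ones marker.

    #include <cstdint>

    namespace dex {
    // Assumed value; the real constant is declared in ART's dex_file_types.h.
    constexpr uint32_t kDexNoIndex = 0xFFFFFFFF;
    }  // namespace dex

    // Typical call-site shape after the change.
    bool HasResolvableMethodIndex(uint32_t dex_method_index) {
      return dex_method_index != dex::kDexNoIndex;
    }
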
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 98b859210c..8a9acf108c 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -670,6 +670,11 @@ void LoadStoreElimination::Run() {
     return;
   }
 
+  // TODO: analyze VecLoad/VecStore better.
+  if (graph_->HasSIMD()) {
+    return;
+  }
+
   LSEVisitor lse_visitor(graph_, heap_location_collector, side_effects_, stats_);
   for (HBasicBlock* block : graph_->GetReversePostOrder()) {
     lse_visitor.VisitBasicBlock(block);
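
With this guard, load-store elimination skips any graph containing SIMD operations, since VecLoad/VecStore heap locations are not modeled precisely yet (per the TODO); bailing out of the whole pass is the conservative, sound choice. A stand-alone sketch of the pattern, with stand-in types in place of the real pass:

    // Stand-in types; the real pass lives in load_store_elimination.cc.
    struct HGraph {
      bool HasSIMD() const { return has_simd_; }
      bool has_simd_ = false;
    };

    struct LoadStoreEliminationSketch {
      explicit LoadStoreEliminationSketch(HGraph* graph) : graph_(graph) {}

      void Run() {
        // Vector memory operations are not analyzed yet, so eliminating
        // nothing is safer than eliminating something unsoundly.
        if (graph_->HasSIMD()) {
          return;
        }
        // ... visit blocks in reverse post order, removing redundant
        // loads and stores ...
      }

      HGraph* graph_;
    };
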
diff --git a/compiler/optimizing/optimizing_cfi_test.cc b/compiler/optimizing/optimizing_cfi_test.cc
index c24f1de93d..99d5284714 100644
--- a/compiler/optimizing/optimizing_cfi_test.cc
+++ b/compiler/optimizing/optimizing_cfi_test.cc
@@ -23,6 +23,7 @@
 #include "gtest/gtest.h"
 #include "optimizing/code_generator.h"
 #include "optimizing/optimizing_unit_test.h"
+#include "read_barrier_config.h"
 #include "utils/arm/assembler_arm_vixl.h"
 #include "utils/assembler.h"
 #include "utils/mips/assembler_mips.h"
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 399cd98983..e128a15cfd 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -434,9 +434,9 @@ bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx ATTRIBUTE_UNUSED,
 }
 
 static bool IsInstructionSetSupported(InstructionSet instruction_set) {
-  return (instruction_set == kArm && !kArm32QuickCodeUseSoftFloat)
+  return instruction_set == kArm
       || instruction_set == kArm64
-      || (instruction_set == kThumb2 && !kArm32QuickCodeUseSoftFloat)
+      || instruction_set == kThumb2
       || instruction_set == kMips
       || instruction_set == kMips64
      || instruction_set == kX86
@@ -1269,8 +1269,8 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   if (compiler_options.GetGenerateDebugInfo()) {
     const auto* method_header = reinterpret_cast<const OatQuickMethodHeader*>(code);
     const uintptr_t code_address = reinterpret_cast<uintptr_t>(method_header->GetCode());
-    debug::MethodDebugInfo info = debug::MethodDebugInfo();
-    info.trampoline_name = nullptr;
+    debug::MethodDebugInfo info = {};
+    DCHECK(info.trampoline_name.empty());
     info.dex_file = dex_file;
     info.class_def_index = class_def_idx;
     info.dex_method_index = method_idx;
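
Two independent cleanups in this file: IsInstructionSetSupported drops the kArm32QuickCodeUseSoftFloat guard, so kArm and kThumb2 are supported unconditionally, and MethodDebugInfo is value-initialized with = {} instead of storing nullptr into trampoline_name, which implies the field is now a std::string rather than a raw pointer. A hypothetical sketch of why the DCHECK can replace the assignment:

    #include <cassert>
    #include <cstdint>
    #include <string>

    // Hypothetical stand-in for debug::MethodDebugInfo after the change.
    struct MethodDebugInfo {
      std::string trampoline_name;  // empty after value-initialization
      const void* dex_file;
      uint32_t dex_method_index;
    };

    int main() {
      MethodDebugInfo info = {};             // value-initializes every member
      assert(info.trampoline_name.empty());  // mirrors the new DCHECK
      return 0;
    }
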
diff --git a/compiler/optimizing/prepare_for_register_allocation.cc b/compiler/optimizing/prepare_for_register_allocation.cc
index 5de707a50f..2c856cd3d9 100644
--- a/compiler/optimizing/prepare_for_register_allocation.cc
+++ b/compiler/optimizing/prepare_for_register_allocation.cc
@@ -16,6 +16,7 @@
 
 #include "prepare_for_register_allocation.h"
 
+#include "dex_file_types.h"
 #include "jni_internal.h"
 #include "optimizing_compiler_stats.h"
 #include "well_known_classes.h"
@@ -59,7 +60,7 @@ void PrepareForRegisterAllocation::VisitBoundsCheck(HBoundsCheck* check) {
     HEnvironment* environment = new (arena) HEnvironment(arena,
                                                          /* number_of_vregs */ 0u,
                                                          char_at_method,
-                                                         /* dex_pc */ DexFile::kDexNoIndex,
+                                                         /* dex_pc */ dex::kDexNoIndex,
                                                          check);
     check->InsertRawEnvironment(environment);
   }
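
The artificial environment built for an intrinsified String.charAt() bounds check has no real bytecode location, so its dex_pc is the sentinel, now spelled dex::kDexNoIndex. A simplified sketch of the idea, with hypothetical stand-in types in place of ART's HEnvironment:

    #include <cstdint>

    constexpr uint32_t kDexNoIndex = 0xFFFFFFFF;  // assumed sentinel value

    struct ArtMethod;  // opaque

    // Hypothetical, simplified frame description: zero vregs, attributed to
    // char_at_method, at a nonexistent bytecode location.
    struct EnvironmentSketch {
      uint32_t number_of_vregs;
      ArtMethod* method;
      uint32_t dex_pc;
    };

    EnvironmentSketch MakeCharAtEnvironment(ArtMethod* char_at_method) {
      return EnvironmentSketch{/* number_of_vregs */ 0u,
                               char_at_method,
                               /* dex_pc */ kDexNoIndex};
    }
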
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index b7840d73db..7eb2188a28 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -18,6 +18,7 @@
 
 #include "art_method-inl.h"
 #include "base/stl_util.h"
+#include "dex_file_types.h"
 #include "optimizing/optimizing_compiler.h"
 #include "runtime.h"
 #include "scoped_thread_state_change-inl.h"
@@ -39,7 +40,7 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
   current_entry_.inlining_depth = inlining_depth;
   current_entry_.inline_infos_start_index = inline_infos_.size();
   current_entry_.stack_mask_index = 0;
-  current_entry_.dex_method_index = DexFile::kDexNoIndex;
+  current_entry_.dex_method_index = dex::kDexNoIndex;
   current_entry_.dex_register_entry.num_dex_registers = num_dex_registers;
   current_entry_.dex_register_entry.locations_start_index = dex_register_locations_.size();
   current_entry_.dex_register_entry.live_dex_registers_mask = (num_dex_registers != 0)
@@ -226,7 +227,7 @@ void StackMapStream::ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding) {
   size_t invoke_infos_count = 0;
   size_t invoke_type_max = 0;
   for (const StackMapEntry& entry : stack_maps_) {
-    if (entry.dex_method_index != DexFile::kDexNoIndex) {
+    if (entry.dex_method_index != dex::kDexNoIndex) {
       native_pc_max = std::max(native_pc_max, entry.native_pc_code_offset.CompressedValue());
       method_index_max = std::max(method_index_max, static_cast<uint16_t>(entry.dex_method_index));
       invoke_type_max = std::max(invoke_type_max, static_cast<size_t>(entry.invoke_type));
@@ -240,7 +241,7 @@ void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
 void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
                                                size_t dex_register_maps_bytes) {
   uint32_t method_index_max = 0;
-  uint32_t dex_pc_max = DexFile::kDexNoIndex;
+  uint32_t dex_pc_max = dex::kDexNoIndex;
   uint32_t extra_data_max = 0;
   uint32_t inline_info_index = 0;
 
@@ -256,8 +257,8 @@ void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
         extra_data_max = std::max(
             extra_data_max, Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
       }
-      if (inline_entry.dex_pc != DexFile::kDexNoIndex &&
-          (dex_pc_max == DexFile::kDexNoIndex || dex_pc_max < inline_entry.dex_pc)) {
+      if (inline_entry.dex_pc != dex::kDexNoIndex &&
+          (dex_pc_max == dex::kDexNoIndex || dex_pc_max < inline_entry.dex_pc)) {
         dex_pc_max = inline_entry.dex_pc;
       }
     }
@@ -362,7 +363,7 @@ void StackMapStream::FillInCodeInfo(MemoryRegion region) {
                                             dex_register_locations_region);
     stack_map.SetDexRegisterMapOffset(encoding.stack_map.encoding, offset);
 
-    if (entry.dex_method_index != DexFile::kDexNoIndex) {
+    if (entry.dex_method_index != dex::kDexNoIndex) {
       InvokeInfo invoke_info(code_info.GetInvokeInfo(encoding, invoke_info_idx));
       invoke_info.SetNativePcCodeOffset(encoding.invoke_info.encoding, entry.native_pc_code_offset);
       invoke_info.SetInvokeType(encoding.invoke_info.encoding, entry.invoke_type);
@@ -561,7 +562,7 @@ void StackMapStream::PrepareMethodIndices() {
   for (StackMapEntry& stack_map : stack_maps_) {
     const size_t index = dedupe.size();
     const uint32_t method_index = stack_map.dex_method_index;
-    if (method_index != DexFile::kDexNoIndex) {
+    if (method_index != dex::kDexNoIndex) {
       stack_map.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
       method_indices_[index] = method_index;
     }
@@ -569,7 +570,7 @@ void StackMapStream::PrepareMethodIndices() {
   for (InlineInfoEntry& inline_info : inline_infos_) {
     const size_t index = dedupe.size();
     const uint32_t method_index = inline_info.method_index;
-    CHECK_NE(method_index, DexFile::kDexNoIndex);
+    CHECK_NE(method_index, dex::kDexNoIndex);
     inline_info.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
     method_indices_[index] = method_index;
   }
@@ -629,7 +630,7 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
         DCHECK_EQ(stack_mask.LoadBit(b), 0u);
       }
     }
-    if (entry.dex_method_index != DexFile::kDexNoIndex) {
+    if (entry.dex_method_index != dex::kDexNoIndex) {
       InvokeInfo invoke_info = code_info.GetInvokeInfo(encoding, invoke_info_index);
       DCHECK_EQ(invoke_info.GetNativePcOffset(encoding.invoke_info.encoding, instruction_set_),
                 entry.native_pc_code_offset.Uint32Value(instruction_set_));
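
All of the stack_map_stream.cc hunks touch one pattern: invoke info is sized, written, and verified only for stack map entries whose dex_method_index holds a real index, and the sentinel spelling changes accordingly. A compilable sketch of the counting step with simplified types (the real entries carry far more state):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr uint32_t kDexNoIndex = 0xFFFFFFFF;  // assumed sentinel value

    struct StackMapEntry {
      uint32_t dex_method_index = kDexNoIndex;
    };

    // Entries without a real method index (the common case) contribute no
    // invoke info; only stack maps recorded for invokes are counted.
    std::size_t CountInvokeInfos(const std::vector<StackMapEntry>& stack_maps) {
      std::size_t invoke_infos_count = 0;
      for (const StackMapEntry& entry : stack_maps) {
        if (entry.dex_method_index != kDexNoIndex) {
          ++invoke_infos_count;
        }
      }
      return invoke_infos_count;
    }
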
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index e6471e1bc5..a574566e33 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -126,7 +126,7 @@ class StackMapStream : public ValueObject {
   };
 
   struct InlineInfoEntry {
-    uint32_t dex_pc;  // DexFile::kDexNoIndex for intrinsified native methods.
+    uint32_t dex_pc;  // dex::kDexNoIndex for intrinsified native methods.
     ArtMethod* method;
     uint32_t method_index;
     DexRegisterMapEntry dex_register_entry;