Merge "BuildInvoke refactor: extract String.init in its own code path"
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index 8d41595..f74b079 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -492,8 +492,18 @@
std::unique_ptr<ThreadPool> thread_pool(
new ThreadPool("Compiler driver thread pool", thread_count_ - 1));
VLOG(compiler) << "Before precompile " << GetMemoryUsageString(false);
+ // Precompile:
+ // 1) Load image classes
+ // 2) Resolve all classes
+ // 3) Attempt to verify all classes
+ // 4) Attempt to initialize image classes, and trivially initialized classes
PreCompile(class_loader, dex_files, thread_pool.get(), timings);
- Compile(class_loader, dex_files, thread_pool.get(), timings);
+ // Compile:
+ // 1) Compile all classes and methods enabled for compilation. May fall back to dex-to-dex
+ // compilation.
+ if (!GetCompilerOptions().VerifyAtRuntime()) {
+ Compile(class_loader, dex_files, thread_pool.get(), timings);
+ }
if (dump_stats_) {
stats_->Dump();
}
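
For context between the two files: a minimal sketch of the accessor the new guard calls. The names and filter set here are assumptions, not the actual ART CompilerOptions; the point is that the verify-at-runtime filter defers verification to first use, so the driver stops after PreCompile() and generates no code.

    // Hypothetical sketch, not the actual ART CompilerOptions.
    class CompilerOptions {
     public:
      enum CompilerFilter {
        kVerifyNone,       // Skip verification entirely.
        kVerifyAtRuntime,  // Only precompile; verify lazily at runtime.
        kSpeed,            // Full AOT compilation.
      };

      // True when code generation should be skipped after PreCompile().
      bool VerifyAtRuntime() const {
        return compiler_filter_ == kVerifyAtRuntime;
      }

     private:
      CompilerFilter compiler_filter_ = kSpeed;
    };
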
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index e302317..d0bc55c 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -2118,6 +2118,152 @@
GenLeadingZeros(assembler, invoke, /* is_long */ true);
}
+static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
+ LocationSummary* locations = new (arena) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ if (is_long) {
+ locations->SetInAt(0, Location::RequiresRegister());
+ } else {
+ locations->SetInAt(0, Location::Any());
+ }
+ locations->SetOut(Location::RequiresRegister());
+}
+
+static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
+ LocationSummary* locations = invoke->GetLocations();
+ Location src = locations->InAt(0);
+ Register out = locations->Out().AsRegister<Register>();
+
+ if (invoke->InputAt(0)->IsConstant()) {
+ // Evaluate this at compile time.
+ int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
+ if (value == 0) {
+ value = is_long ? 64 : 32;
+ } else {
+ value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
+ }
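+ // A zero result is materialized with XOR, which has a shorter encoding
+ // than moving a zero immediate.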
+ if (value == 0) {
+ __ xorl(out, out);
+ } else {
+ __ movl(out, Immediate(value));
+ }
+ return;
+ }
+
+ // Handle the non-constant cases.
+ if (!is_long) {
+ if (src.IsRegister()) {
+ __ bsfl(out, src.AsRegister<Register>());
+ } else {
+ DCHECK(src.IsStackSlot());
+ __ bsfl(out, Address(ESP, src.GetStackIndex()));
+ }
+
+ // BSF sets ZF if the input was zero, and the output is undefined.
+ NearLabel done;
+ __ j(kNotEqual, &done);
+
+ // Fix the zero case with the expected result.
+ __ movl(out, Immediate(32));
+
+ __ Bind(&done);
+ return;
+ }
+
+ // The 64-bit case has to handle both halves of the register pair.
+ DCHECK(src.IsRegisterPair());
+ Register src_lo = src.AsRegisterPairLow<Register>();
+ Register src_hi = src.AsRegisterPairHigh<Register>();
+ NearLabel done, all_zeroes;
+
+ // If the low word is zero, then ZF will be set. If not, we have the answer.
+ __ bsfl(out, src_lo);
+ __ j(kNotEqual, &done);
+
+ // Low word was zero. We have to compute the high word count and add 32.
+ __ bsfl(out, src_hi);
+ __ j(kEqual, &all_zeroes);
+
+ // We had a valid result. Add 32 to account for the low word being zero.
+ __ addl(out, Immediate(32));
+ __ jmp(&done);
+
+ // All zero case.
+ __ Bind(&all_zeroes);
+ __ movl(out, Immediate(64));
+
+ __ Bind(&done);
+}
+
+void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+ CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
+}
+
+void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+ X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
+ GenTrailingZeros(assembler, invoke, /* is_long */ false);
+}
+
+void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+ CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
+}
+
+void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+ X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
+ GenTrailingZeros(assembler, invoke, /* is_long */ true);
+}
+
+static void CreateRotateLocations(ArenaAllocator* arena, HInvoke* invoke) {
+ LocationSummary* locations = new (arena) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ // The shift count needs to be in CL or a constant.
+ locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, invoke->InputAt(1)));
+ locations->SetOut(Location::SameAsFirstInput());
+}
+
+static void GenRotate(X86Assembler* assembler, HInvoke* invoke, bool is_left) {
+ LocationSummary* locations = invoke->GetLocations();
+ Register first_reg = locations->InAt(0).AsRegister<Register>();
+ Location second = locations->InAt(1);
+
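+ // A rotate count in CL is masked to the operand width by the hardware,
+ // so only the constant path needs an explicit mask.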
+ if (second.IsRegister()) {
+ Register second_reg = second.AsRegister<Register>();
+ if (is_left) {
+ __ roll(first_reg, second_reg);
+ } else {
+ __ rorl(first_reg, second_reg);
+ }
+ } else {
+ Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
+ if (is_left) {
+ __ roll(first_reg, imm);
+ } else {
+ __ rorl(first_reg, imm);
+ }
+ }
+}
+
+void IntrinsicLocationsBuilderX86::VisitIntegerRotateLeft(HInvoke* invoke) {
+ CreateRotateLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitIntegerRotateLeft(HInvoke* invoke) {
+ X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
+ GenRotate(assembler, invoke, /* is_left */ true);
+}
+
+void IntrinsicLocationsBuilderX86::VisitIntegerRotateRight(HInvoke* invoke) {
+ CreateRotateLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitIntegerRotateRight(HInvoke* invoke) {
+ X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
+ GenRotate(assembler, invoke, /* is_left */ false);
+}
+
// Unimplemented intrinsics.
#define UNIMPLEMENTED_INTRINSIC(Name) \
@@ -2129,11 +2275,7 @@
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
-UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros)
-UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
#undef UNIMPLEMENTED_INTRINSIC
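
As a cross-check on GenTrailingZeros above, a host-side reference of the semantics the lowering must preserve (a sketch, not ART code): a zero input yields the type width, and the long path on 32-bit x86 splits the computation across the register pair exactly as this splits it across words.

    #include <cstdint>

    // What BSF computes for a non-zero input, with the zero case patched
    // to 32, mirroring the movl(out, Immediate(32)) fixup after the branch.
    static int TrailingZeros32(uint32_t value) {
      if (value == 0) {
        return 32;
      }
      int count = 0;
      while ((value & 1u) == 0) {
        value >>= 1;
        ++count;
      }
      return count;
    }

    // Mirrors the register-pair split: try the low word first and fall back
    // to the high word (plus 32) only when the low word is zero.
    static int TrailingZeros64(uint64_t value) {
      uint32_t lo = static_cast<uint32_t>(value);
      if (lo != 0) {
        return TrailingZeros32(lo);
      }
      uint32_t hi = static_cast<uint32_t>(value >> 32);
      return (hi != 0) ? 32 + TrailingZeros32(hi) : 64;
    }
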
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 51980af..315cbab 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1925,6 +1925,165 @@
GenLeadingZeros(assembler, invoke, /* is_long */ true);
}
+static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke) {
+ LocationSummary* locations = new (arena) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::Any());
+ locations->SetOut(Location::RequiresRegister());
+}
+
+static void GenTrailingZeros(X86_64Assembler* assembler, HInvoke* invoke, bool is_long) {
+ LocationSummary* locations = invoke->GetLocations();
+ Location src = locations->InAt(0);
+ CpuRegister out = locations->Out().AsRegister<CpuRegister>();
+
+ int zero_value_result = is_long ? 64 : 32;
+ if (invoke->InputAt(0)->IsConstant()) {
+ // Evaluate this at compile time.
+ int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
+ if (value == 0) {
+ value = zero_value_result;
+ } else {
+ value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
+ }
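+ // A zero result is materialized with XOR, which has a shorter encoding
+ // than moving a zero immediate.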
+ if (value == 0) {
+ __ xorl(out, out);
+ } else {
+ __ movl(out, Immediate(value));
+ }
+ return;
+ }
+
+ // Handle the non-constant cases.
+ if (src.IsRegister()) {
+ if (is_long) {
+ __ bsfq(out, src.AsRegister<CpuRegister>());
+ } else {
+ __ bsfl(out, src.AsRegister<CpuRegister>());
+ }
+ } else if (is_long) {
+ DCHECK(src.IsDoubleStackSlot());
+ __ bsfq(out, Address(CpuRegister(RSP), src.GetStackIndex()));
+ } else {
+ DCHECK(src.IsStackSlot());
+ __ bsfl(out, Address(CpuRegister(RSP), src.GetStackIndex()));
+ }
+
+ // BSF sets ZF if the input was zero, and the output is undefined.
+ NearLabel done;
+ __ j(kNotEqual, &done);
+
+ // Fix the zero case with the expected result.
+ __ movl(out, Immediate(zero_value_result));
+
+ __ Bind(&done);
+}
+
+void IntrinsicLocationsBuilderX86_64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+ CreateTrailingZeroLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen_->GetAssembler());
+ GenTrailingZeros(assembler, invoke, /* is_long */ false);
+}
+
+void IntrinsicLocationsBuilderX86_64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+ CreateTrailingZeroLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen_->GetAssembler());
+ GenTrailingZeros(assembler, invoke, /* is_long */ true);
+}
+
+static void CreateRotateLocations(ArenaAllocator* arena, HInvoke* invoke) {
+ LocationSummary* locations = new (arena) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ // The shift count needs to be in CL or a constant.
+ locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, invoke->InputAt(1)));
+ locations->SetOut(Location::SameAsFirstInput());
+}
+
+static void GenRotate(X86_64Assembler* assembler, HInvoke* invoke, bool is_long, bool is_left) {
+ LocationSummary* locations = invoke->GetLocations();
+ CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
+ Location second = locations->InAt(1);
+
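+ // A rotate count in CL is masked to the operand width by the hardware,
+ // so only the constant paths need an explicit mask.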
+ if (is_long) {
+ if (second.IsRegister()) {
+ CpuRegister second_reg = second.AsRegister<CpuRegister>();
+ if (is_left) {
+ __ rolq(first_reg, second_reg);
+ } else {
+ __ rorq(first_reg, second_reg);
+ }
+ } else {
+ Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
+ if (is_left) {
+ __ rolq(first_reg, imm);
+ } else {
+ __ rorq(first_reg, imm);
+ }
+ }
+ } else {
+ if (second.IsRegister()) {
+ CpuRegister second_reg = second.AsRegister<CpuRegister>();
+ if (is_left) {
+ __ roll(first_reg, second_reg);
+ } else {
+ __ rorl(first_reg, second_reg);
+ }
+ } else {
+ Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
+ if (is_left) {
+ __ roll(first_reg, imm);
+ } else {
+ __ rorl(first_reg, imm);
+ }
+ }
+ }
+}
+
+void IntrinsicLocationsBuilderX86_64::VisitIntegerRotateLeft(HInvoke* invoke) {
+ CreateRotateLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitIntegerRotateLeft(HInvoke* invoke) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen_->GetAssembler());
+ GenRotate(assembler, invoke, /* is_long */ false, /* is_left */ true);
+}
+
+void IntrinsicLocationsBuilderX86_64::VisitIntegerRotateRight(HInvoke* invoke) {
+ CreateRotateLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitIntegerRotateRight(HInvoke* invoke) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen_->GetAssembler());
+ GenRotate(assembler, invoke, /* is_long */ false, /* is_left */ false);
+}
+
+void IntrinsicLocationsBuilderX86_64::VisitLongRotateLeft(HInvoke* invoke) {
+ CreateRotateLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitLongRotateLeft(HInvoke* invoke) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen_->GetAssembler());
+ GenRotate(assembler, invoke, /* is_long */ true, /* is_left */ true);
+}
+
+void IntrinsicLocationsBuilderX86_64::VisitLongRotateRight(HInvoke* invoke) {
+ CreateRotateLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitLongRotateRight(HInvoke* invoke) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen_->GetAssembler());
+ GenRotate(assembler, invoke, /* is_long */ true, /* is_left */ false);
+}
+
// Unimplemented intrinsics.
#define UNIMPLEMENTED_INTRINSIC(Name) \
@@ -1935,12 +2094,6 @@
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
-UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros)
-UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
-UNIMPLEMENTED_INTRINSIC(LongRotateRight)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
-UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
#undef UNIMPLEMENTED_INTRINSIC
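
Finally, a reference sketch of the rotate semantics implemented in both files (assumptions: kMaxIntShiftValue is 31 and kMaxLongShiftValue is 63, matching the masks applied in the constant paths; a count passed in CL is masked to the same range by the hardware, which is why the register paths need no fixup).

    #include <cstdint>

    // Reference for the 32-bit rotates lowered to roll/rorl.
    static uint32_t RotateLeft32(uint32_t value, int count) {
      count &= 31;  // Assumed value of kMaxIntShiftValue.
      if (count == 0) {
        return value;  // Avoids the undefined shift by 32 below.
      }
      return (value << count) | (value >> (32 - count));
    }

    // Reference for the 64-bit rotates, which only x86-64 lowers (rolq/rorq);
    // the 32-bit x86 long rotates are left as UNIMPLEMENTED_INTRINSIC.
    static uint64_t RotateRight64(uint64_t value, int count) {
      count &= 63;  // Assumed value of kMaxLongShiftValue.
      if (count == 0) {
        return value;
      }
      return (value >> count) | (value << (64 - count));
    }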