Diffstat (limited to 'compiler')
| -rw-r--r-- | compiler/optimizing/intrinsics_arm.cc    | 21 |
| -rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc  | 19 |
| -rw-r--r-- | compiler/optimizing/intrinsics_mips.cc   |  9 |
| -rw-r--r-- | compiler/optimizing/intrinsics_mips64.cc |  9 |
| -rw-r--r-- | compiler/optimizing/intrinsics_x86.cc    |  7 |
| -rw-r--r-- | compiler/optimizing/intrinsics_x86_64.cc |  7 |
6 files changed, 36 insertions, 36 deletions
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 146fea1fe0..4e3ace498d 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -1115,15 +1115,15 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                        ArenaAllocator* allocator,
                                        bool start_at_zero) {
   LocationSummary* locations = invoke->GetLocations();
-  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

   // Note that the null check must have been done earlier.
   DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

   // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
-  // or directly dispatch if we have a constant.
+  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
   SlowPathCode* slow_path = nullptr;
-  if (invoke->InputAt(1)->IsIntConstant()) {
+  HInstruction* code_point = invoke->InputAt(1);
+  if (code_point->IsIntConstant()) {
     if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
         std::numeric_limits<uint16_t>::max()) {
       // Always needs the slow-path. We could directly dispatch to it, but this case should be
@@ -1134,16 +1134,18 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-  } else {
+  } else if (code_point->GetType() != Primitive::kPrimChar) {
     Register char_reg = locations->InAt(1).AsRegister<Register>();
-    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
-    __ cmp(char_reg, ShifterOperand(tmp_reg));
+    // 0xffff is not modified immediate but 0x10000 is, so use `>= 0x10000` instead of `> 0xffff`.
+    __ cmp(char_reg,
+           ShifterOperand(static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1));
     slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
     codegen->AddSlowPath(slow_path);
-    __ b(slow_path->GetEntryLabel(), HI);
+    __ b(slow_path->GetEntryLabel(), HS);
   }

   if (start_at_zero) {
+    Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
     DCHECK_EQ(tmp_reg, R2);
     // Start-index = 0.
     __ LoadImmediate(tmp_reg, 0);
@@ -1170,7 +1172,7 @@ void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
   locations->SetOut(Location::RegisterLocation(R0));

-  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
+  // Need to send start-index=0.
   locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
 }

@@ -1190,9 +1192,6 @@ void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
   locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
   locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
   locations->SetOut(Location::RegisterLocation(R0));
-
-  // Need a temp for slow-path codepoint compare.
-  locations->AddTemp(Location::RequiresRegister());
 }

 void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 1d8229674c..cc5fd65e2e 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -1390,15 +1390,15 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                        ArenaAllocator* allocator,
                                        bool start_at_zero) {
   LocationSummary* locations = invoke->GetLocations();
-  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

   // Note that the null check must have been done earlier.
   DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

   // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
-  // or directly dispatch if we have a constant.
+  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
   SlowPathCodeARM64* slow_path = nullptr;
-  if (invoke->InputAt(1)->IsIntConstant()) {
+  HInstruction* code_point = invoke->InputAt(1);
+  if (code_point->IsIntConstant()) {
     if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
       // Always needs the slow-path. We could directly dispatch to it, but this case should be
       // rare, so for simplicity just put the full slow-path down and branch unconditionally.
@@ -1408,17 +1408,17 @@ static void GenerateVisitStringIndexOf(HInvoke* invoke,
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-  } else {
+  } else if (code_point->GetType() != Primitive::kPrimChar) {
     Register char_reg = WRegisterFrom(locations->InAt(1));
-    __ Mov(tmp_reg, 0xFFFF);
-    __ Cmp(char_reg, Operand(tmp_reg));
+    __ Tst(char_reg, 0xFFFF0000);
     slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
     codegen->AddSlowPath(slow_path);
-    __ B(hi, slow_path->GetEntryLabel());
+    __ B(ne, slow_path->GetEntryLabel());
   }

   if (start_at_zero) {
     // Start-index = 0.
+    Register tmp_reg = WRegisterFrom(locations->GetTemp(0));
     __ Mov(tmp_reg, 0);
   }

@@ -1442,7 +1442,7 @@ void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
   locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
   locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

-  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
+  // Need to send start_index=0.
   locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
 }

@@ -1462,9 +1462,6 @@ void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
   locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
   locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
   locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
-
-  // Need a temp for slow-path codepoint compare.
-  locations->AddTemp(Location::RequiresRegister());
 }

 void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc
index 46195c104a..20b61f8a1c 100644
--- a/compiler/optimizing/intrinsics_mips.cc
+++ b/compiler/optimizing/intrinsics_mips.cc
@@ -2067,10 +2067,11 @@ static void GenerateStringIndexOf(HInvoke* invoke,
   // Note that the null check must have been done earlier.
   DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

-  // Check for code points > 0xFFFF. Either a slow-path check when we
-  // don't know statically, or directly dispatch if we have a constant.
+  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
+  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
   SlowPathCodeMIPS* slow_path = nullptr;
-  if (invoke->InputAt(1)->IsIntConstant()) {
+  HInstruction* code_point = invoke->InputAt(1);
+  if (code_point->IsIntConstant()) {
     if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
       // Always needs the slow-path. We could directly dispatch to it,
       // but this case should be rare, so for simplicity just put the
@@ -2081,7 +2082,7 @@ static void GenerateStringIndexOf(HInvoke* invoke,
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-  } else {
+  } else if (code_point->GetType() != Primitive::kPrimChar) {
     Register char_reg = locations->InAt(1).AsRegister<Register>();
     // The "bltu" conditional branch tests to see if the character value
     // fits in a valid 16-bit (MIPS halfword) value. If it doesn't then
diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc
index 1524e1e011..7188e1cc75 100644
--- a/compiler/optimizing/intrinsics_mips64.cc
+++ b/compiler/optimizing/intrinsics_mips64.cc
@@ -1563,10 +1563,11 @@ static void GenerateStringIndexOf(HInvoke* invoke,
   // Note that the null check must have been done earlier.
   DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

-  // Check for code points > 0xFFFF. Either a slow-path check when we
-  // don't know statically, or directly dispatch if we have a constant.
+  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
+  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
   SlowPathCodeMIPS64* slow_path = nullptr;
-  if (invoke->InputAt(1)->IsIntConstant()) {
+  HInstruction* code_point = invoke->InputAt(1);
+  if (code_point->IsIntConstant()) {
     if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) {
       // Always needs the slow-path. We could directly dispatch to it,
       // but this case should be rare, so for simplicity just put the
@@ -1577,7 +1578,7 @@ static void GenerateStringIndexOf(HInvoke* invoke,
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-  } else {
+  } else if (code_point->GetType() != Primitive::kPrimChar) {
     GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>();
     __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
     slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke);
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 4aab3e2768..d0edecae22 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1418,9 +1418,10 @@ static void GenerateStringIndexOf(HInvoke* invoke,
   DCHECK_EQ(out, EDI);

   // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
-  // or directly dispatch if we have a constant.
+  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
   SlowPathCode* slow_path = nullptr;
-  if (invoke->InputAt(1)->IsIntConstant()) {
+  HInstruction* code_point = invoke->InputAt(1);
+  if (code_point->IsIntConstant()) {
     if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
         std::numeric_limits<uint16_t>::max()) {
       // Always needs the slow-path. We could directly dispatch to it, but this case should be
@@ -1431,7 +1432,7 @@ static void GenerateStringIndexOf(HInvoke* invoke,
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-  } else {
+  } else if (code_point->GetType() != Primitive::kPrimChar) {
     __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
     slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
     codegen->AddSlowPath(slow_path);
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 1d32dc7bc5..4ee2368ff5 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1517,9 +1517,10 @@ static void GenerateStringIndexOf(HInvoke* invoke,
   DCHECK_EQ(out.AsRegister(), RDI);

   // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
-  // or directly dispatch if we have a constant.
+  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
   SlowPathCode* slow_path = nullptr;
-  if (invoke->InputAt(1)->IsIntConstant()) {
+  HInstruction* code_point = invoke->InputAt(1);
+  if (code_point->IsIntConstant()) {
     if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
         std::numeric_limits<uint16_t>::max()) {
       // Always needs the slow-path. We could directly dispatch to it, but this case should be
@@ -1530,7 +1531,7 @@ static void GenerateStringIndexOf(HInvoke* invoke,
       __ Bind(slow_path->GetExitLabel());
       return;
     }
-  } else {
+  } else if (code_point->GetType() != Primitive::kPrimChar) {
     __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
     slow_path = new (allocator) IntrinsicSlowPathX86_64(invoke);
     codegen->AddSlowPath(slow_path);
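
Note on the change common to all six back ends: the code point argument is now looked at as `code_point`, and when its HIR type is Primitive::kPrimChar the value is guaranteed to fit in 16 bits, so the `> 0xFFFF` range check (and the temp register it previously needed) is skipped entirely. The following is a minimal standalone sketch of that dispatch decision, not the ART code itself; the enum and function names are illustrative only.

#include <cassert>
#include <cstdint>

enum class PrimType { kInt, kChar };

enum class IndexOfPath {
  kSlowPathOnly,       // constant code point above 0xFFFF: always take the runtime call
  kFastPathOnly,       // small constant or char-typed value: no range check emitted
  kFastPathWithCheck,  // int of unknown value: compare and branch to the slow path
};

// Mirrors the shape of the decision in GenerateVisitStringIndexOf()/GenerateStringIndexOf():
// a char can never exceed 0xFFFF, so it needs neither the check nor the temp register.
IndexOfPath SelectIndexOfPath(bool is_constant, uint32_t constant_value, PrimType type) {
  if (is_constant) {
    return constant_value > 0xFFFFu ? IndexOfPath::kSlowPathOnly : IndexOfPath::kFastPathOnly;
  }
  return type == PrimType::kChar ? IndexOfPath::kFastPathOnly : IndexOfPath::kFastPathWithCheck;
}

int main() {
  assert(SelectIndexOfPath(true, 0x10400u, PrimType::kInt) == IndexOfPath::kSlowPathOnly);
  assert(SelectIndexOfPath(true, 'x', PrimType::kInt) == IndexOfPath::kFastPathOnly);
  assert(SelectIndexOfPath(false, 0u, PrimType::kChar) == IndexOfPath::kFastPathOnly);
  assert(SelectIndexOfPath(false, 0u, PrimType::kInt) == IndexOfPath::kFastPathWithCheck);
  return 0;
}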
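
The ARM comment above ("0xffff is not modified immediate but 0x10000 is") is why the compare constant changes and the LoadImmediate into a temp register disappears: for an unsigned code point x, `x > 0xffff` is the same as `x >= 0x10000`, and 0x10000 can be encoded directly in the CMP, paired with the HS (unsigned >=) condition instead of HI. A rough standalone illustration, assuming the simplified A32 rule that a modified immediate is an 8-bit value rotated right by an even amount (this is not ART's own encoder):

#include <cassert>
#include <cstdint>

static bool IsA32ModifiedImmediate(uint32_t value) {
  for (uint32_t rot = 0; rot < 32; rot += 2) {
    // Undo a rotate-right-by-rot encoding by rotating left; encodable if the result fits in 8 bits.
    uint32_t rotated = (value << rot) | (value >> ((32u - rot) & 31u));
    if (rotated <= 0xffu) {
      return true;
    }
  }
  return false;
}

int main() {
  assert(!IsA32ModifiedImmediate(0xffffu));   // old compare constant: needed a temp register
  assert(IsA32ModifiedImmediate(0x10000u));   // new compare constant: encodable directly
  // For an unsigned code point x, (x > 0xffff) == (x >= 0x10000), so CMP #0x10000
  // followed by an HS (unsigned >=) branch preserves the old behaviour.
  for (uint64_t x = 0; x <= 0x20000; ++x) {
    assert((x > 0xffffu) == (x >= 0x10000u));
  }
  return 0;
}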
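
On ARM64, the old sequence had to materialize 0xFFFF in a temp register because the compare immediate form is too narrow, while TST takes a logical (bitmask) immediate and 0xFFFF0000 is one. Testing the upper 16 bits and branching on `ne` is equivalent to the old unsigned compare and branch on `hi`. A small self-contained check of that equivalence (a sketch, not ART code):

#include <cassert>
#include <cstdint>

int main() {
  // Spot-check around the 0xFFFF boundary; the equivalence holds for all 32-bit values.
  for (uint64_t v = 0; v <= 0x20000; ++v) {
    uint32_t x = static_cast<uint32_t>(v);
    bool tst_nonzero = (x & 0xFFFF0000u) != 0;  // new code: B(ne, ...) after TST
    bool above_ffff = x > 0xFFFFu;              // old code: B(hi, ...) after CMP
    assert(tst_nonzero == above_ffff);
  }
  return 0;
}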