Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator_arm.cc          |   3
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc        |   3
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc       |   3
-rw-r--r--  compiler/optimizing/code_generator_x86.cc          |   3
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc       |   3
-rw-r--r--  compiler/optimizing/instruction_simplifier.cc      |  13
-rw-r--r--  compiler/optimizing/intrinsics_mips64.cc           | 714
-rw-r--r--  compiler/optimizing/reference_type_propagation.cc  |   4
8 files changed, 706 insertions, 40 deletions
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 3e6cad83fa..92a5878476 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -1376,8 +1376,7 @@ void LocationsBuilderARM::VisitDeoptimize(HDeoptimize* deoptimize) {
   LocationSummary* locations = new (GetGraph()->GetArena())
       LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
   HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
 }
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index ffb9b794fc..f68b11b504 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -2353,8 +2353,7 @@ void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
   LocationSummary* locations = new (GetGraph()->GetArena())
       LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
   HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
 }
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index eb20291e20..f561c97eb0 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -2223,8 +2223,7 @@ void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
   LocationSummary* locations = new (GetGraph()->GetArena())
       LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
   HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
 }
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 2aea859b7d..963eec2529 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1306,8 +1306,7 @@ void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
   LocationSummary* locations = new (GetGraph()->GetArena())
       LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
   HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
     locations->SetInAt(0, Location::Any());
   }
 }
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index bf570f581b..ed2e4ca87c 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1252,8 +1252,7 @@ void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
   LocationSummary* locations = new (GetGraph()->GetArena())
       LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
   HInstruction* cond = deoptimize->InputAt(0);
-  DCHECK(cond->IsCondition());
-  if (cond->AsCondition()->NeedsMaterialization()) {
+  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
     locations->SetInAt(0, Location::Any());
   }
 }
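All five backends make the same relaxation: the input of an HDeoptimize is no longer assumed to be an HCondition. Any boolean-typed value can act as the guard (for example a phi, or a constant that survives simplification), and such a value cannot be folded into the branch, so it has to be kept in a register (or in any location on x86). The following standalone sketch (not ART code, names are illustrative only) models the rule the patched guard expresses:

    #include <cassert>

    // Standalone model (not ART code) of the guard kinds that can now reach an
    // HDeoptimize and of the location rule the five backends apply to them.
    enum class GuardKind { kFoldableCondition, kMaterializedCondition, kOtherBoolean };

    // Mirrors "!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()":
    // only a condition that can be emitted as part of the branch avoids a register.
    static bool NeedsRegisterInput(GuardKind guard) {
      return guard != GuardKind::kFoldableCondition;
    }

    int main() {
      assert(!NeedsRegisterInput(GuardKind::kFoldableCondition));
      assert(NeedsRegisterInput(GuardKind::kMaterializedCondition));
      assert(NeedsRegisterInput(GuardKind::kOtherBoolean));  // e.g. a phi or a constant
      return 0;
    }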
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index d468540091..7814eb9c11 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -73,6 +73,7 @@ class InstructionSimplifierVisitor : public HGraphDelegateVisitor {
   void VisitInstanceOf(HInstanceOf* instruction) OVERRIDE;
   void VisitFakeString(HFakeString* fake_string) OVERRIDE;
   void VisitInvoke(HInvoke* invoke) OVERRIDE;
+  void VisitDeoptimize(HDeoptimize* deoptimize) OVERRIDE;

   bool CanEnsureNotNullAt(HInstruction* instr, HInstruction* at) const;

@@ -1151,4 +1152,16 @@ void InstructionSimplifierVisitor::VisitInvoke(HInvoke* instruction) {
   }
 }

+void InstructionSimplifierVisitor::VisitDeoptimize(HDeoptimize* deoptimize) {
+  HInstruction* cond = deoptimize->InputAt(0);
+  if (cond->IsConstant()) {
+    if (cond->AsIntConstant()->IsZero()) {
+      // Never deopt: instruction can be removed.
+      deoptimize->GetBlock()->RemoveInstruction(deoptimize);
+    } else {
+      // Always deopt.
+    }
+  }
+}
+
 }  // namespace art
diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc
index 764a11475f..fe16d00b72 100644
--- a/compiler/optimizing/intrinsics_mips64.cc
+++ b/compiler/optimizing/intrinsics_mips64.cc
@@ -43,6 +43,93 @@ ArenaAllocator* IntrinsicCodeGeneratorMIPS64::GetAllocator() {
   return codegen_->GetGraph()->GetArena();
 }

+#define __ codegen->GetAssembler()->
+
+static void MoveFromReturnRegister(Location trg,
+                                   Primitive::Type type,
+                                   CodeGeneratorMIPS64* codegen) {
+  if (!trg.IsValid()) {
+    DCHECK_EQ(type, Primitive::kPrimVoid);
+    return;
+  }
+
+  DCHECK_NE(type, Primitive::kPrimVoid);
+
+  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
+    GpuRegister trg_reg = trg.AsRegister<GpuRegister>();
+    if (trg_reg != V0) {
+      __ Move(V0, trg_reg);
+    }
+  } else {
+    FpuRegister trg_reg = trg.AsFpuRegister<FpuRegister>();
+    if (trg_reg != F0) {
+      if (type == Primitive::kPrimFloat) {
+        __ MovS(F0, trg_reg);
+      } else {
+        __ MovD(F0, trg_reg);
+      }
+    }
+  }
+}
+
+static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
+  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
+  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
+}
+
+// Slow-path for fallback (calling the managed code to handle the
+// intrinsic) in an intrinsified call. This will copy the arguments
+// into the positions for a regular call.
+//
+// Note: The actual parameters are required to be in the locations
+// given by the invoke's location summary. If an intrinsic
+// modifies those locations before a slowpath call, they must be
+// restored!
+class IntrinsicSlowPathMIPS64 : public SlowPathCodeMIPS64 {
+ public:
+  explicit IntrinsicSlowPathMIPS64(HInvoke* invoke) : invoke_(invoke) { }
+
+  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
+    CodeGeneratorMIPS64* codegen = down_cast<CodeGeneratorMIPS64*>(codegen_in);
+
+    __ Bind(GetEntryLabel());
+
+    SaveLiveRegisters(codegen, invoke_->GetLocations());
+
+    MoveArguments(invoke_, codegen);
+
+    if (invoke_->IsInvokeStaticOrDirect()) {
+      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
+                                          Location::RegisterLocation(A0));
+      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
+    } else {
+      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
+      UNREACHABLE();
+    }
+
+    // Copy the result back to the expected output.
+    Location out = invoke_->GetLocations()->Out();
+    if (out.IsValid()) {
+      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
+      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
+      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
+    }
+
+    RestoreLiveRegisters(codegen, invoke_->GetLocations());
+    __ B(GetExitLabel());
+  }
+
+  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS64"; }
+
+ private:
+  // The instruction where this slow path is happening.
+  HInvoke* const invoke_;
+
+  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS64);
+};
+
+#undef __
+
 bool IntrinsicLocationsBuilderMIPS64::TryDispatch(HInvoke* invoke) {
   Dispatch(invoke);
   LocationSummary* res = invoke->GetLocations();
@@ -185,7 +272,7 @@ void IntrinsicCodeGeneratorMIPS64::VisitShortReverseBytes(HInvoke* invoke) {
   GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
 }

-static void GenCountZeroes(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
+static void GenNumberOfLeadingZeroes(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
   GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
   GpuRegister out = locations->Out().AsRegister<GpuRegister>();

@@ -202,7 +289,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke*
 }

 void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
-  GenCountZeroes(invoke->GetLocations(), false, GetAssembler());
+  GenNumberOfLeadingZeroes(invoke->GetLocations(), false, GetAssembler());
 }

 // int java.lang.Long.numberOfLeadingZeros(long i)
@@ -211,7 +298,103 @@ void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* inv
 }

 void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
-  GenCountZeroes(invoke->GetLocations(), true, GetAssembler());
+  GenNumberOfLeadingZeroes(invoke->GetLocations(), true, GetAssembler());
+}
+
+static void GenNumberOfTrailingZeroes(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {
+  Location in = locations->InAt(0);
+  Location out = locations->Out();
+
+  if (is64bit) {
+    __ Dsbh(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>());
+    __ Dshd(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+    __ Dbitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+    __ Dclz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+  } else {
+    __ Rotr(out.AsRegister<GpuRegister>(), in.AsRegister<GpuRegister>(), 16);
+    __ Wsbh(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+    __ Bitswap(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+    __ Clz(out.AsRegister<GpuRegister>(), out.AsRegister<GpuRegister>());
+  }
+}
+
+// int java.lang.Integer.numberOfTrailingZeros(int i)
+void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+  CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
+  GenNumberOfTrailingZeroes(invoke->GetLocations(), false, GetAssembler());
+}
+
+// int java.lang.Long.numberOfTrailingZeros(long i)
+void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+  CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
+  GenNumberOfTrailingZeroes(invoke->GetLocations(), true, GetAssembler());
+}
+
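The trailing-zeros helpers lean on the identity numberOfTrailingZeros(x) == numberOfLeadingZeros(reverse_bits(x)): the 32-bit path byte-reverses the word with Rotr/Wsbh, reverses the bits inside each byte with Bitswap, and then counts leading zeroes with Clz; the 64-bit path does the same with Dsbh/Dshd, Dbitswap and Dclz. The following standalone C++ sketch (not part of the patch, relies on GCC/Clang builtins) shows the same decomposition on the host and can serve as a quick cross-check of the identity:

    #include <cstdint>
    #include <cassert>

    // Reverse the bits of a 32-bit word: swap the bytes, then swap the bits inside
    // each byte -- the same decomposition the Rotr/Wsbh + Bitswap sequence uses.
    static uint32_t ReverseBits32(uint32_t x) {
      x = __builtin_bswap32(x);                                  // byte reversal
      x = ((x & 0xF0F0F0F0u) >> 4) | ((x & 0x0F0F0F0Fu) << 4);   // bit reversal
      x = ((x & 0xCCCCCCCCu) >> 2) | ((x & 0x33333333u) << 2);   //   within each
      x = ((x & 0xAAAAAAAAu) >> 1) | ((x & 0x55555555u) << 1);   //   byte
      return x;
    }

    // numberOfTrailingZeros(x) == numberOfLeadingZeros(reverse_bits(x)).
    // The MIPS clz instruction returns 32 for zero; __builtin_clz(0) is undefined,
    // hence the explicit guard.
    static int TrailingZeros32(uint32_t x) {
      uint32_t rev = ReverseBits32(x);
      return rev == 0 ? 32 : __builtin_clz(rev);
    }

    int main() {
      assert(TrailingZeros32(0) == 32);
      assert(TrailingZeros32(1) == 0);
      assert(TrailingZeros32(0x80000000u) == 31);
      assert(TrailingZeros32(0x00000020u) == 5);
      return 0;
    }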
+static void GenRotateRight(HInvoke* invoke, + Primitive::Type type, + Mips64Assembler* assembler) { + DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); + + LocationSummary* locations = invoke->GetLocations(); + GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>(); + GpuRegister out = locations->Out().AsRegister<GpuRegister>(); + + if (invoke->InputAt(1)->IsIntConstant()) { + uint32_t shift = static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()); + if (type == Primitive::kPrimInt) { + shift &= 0x1f; + __ Rotr(out, in, shift); + } else { + shift &= 0x3f; + if (shift < 32) { + __ Drotr(out, in, shift); + } else { + shift &= 0x1f; + __ Drotr32(out, in, shift); + } + } + } else { + GpuRegister shamt = locations->InAt(1).AsRegister<GpuRegister>(); + if (type == Primitive::kPrimInt) { + __ Rotrv(out, in, shamt); + } else { + __ Drotrv(out, in, shamt); + } + } +} + +// int java.lang.Integer.rotateRight(int i, int distance) +void IntrinsicLocationsBuilderMIPS64::VisitIntegerRotateRight(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS64::VisitIntegerRotateRight(HInvoke* invoke) { + GenRotateRight(invoke, Primitive::kPrimInt, GetAssembler()); +} + +// int java.lang.Long.rotateRight(long i, int distance) +void IntrinsicLocationsBuilderMIPS64::VisitLongRotateRight(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS64::VisitLongRotateRight(HInvoke* invoke) { + GenRotateRight(invoke, Primitive::kPrimLong, GetAssembler()); } static void GenReverse(LocationSummary* locations, @@ -765,6 +948,505 @@ void IntrinsicCodeGeneratorMIPS64::VisitThreadCurrentThread(HInvoke* invoke) { Thread::PeerOffset<kMips64PointerSize>().Int32Value()); } +static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { + LocationSummary* locations = new (arena) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::NoLocation()); // Unused receiver. + locations->SetInAt(1, Location::RequiresRegister()); + locations->SetInAt(2, Location::RequiresRegister()); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +static void GenUnsafeGet(HInvoke* invoke, + Primitive::Type type, + bool is_volatile, + CodeGeneratorMIPS64* codegen) { + LocationSummary* locations = invoke->GetLocations(); + DCHECK((type == Primitive::kPrimInt) || + (type == Primitive::kPrimLong) || + (type == Primitive::kPrimNot)); + Mips64Assembler* assembler = codegen->GetAssembler(); + // Object pointer. + GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>(); + // Long offset. 
+ GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>(); + GpuRegister trg = locations->Out().AsRegister<GpuRegister>(); + + __ Daddu(TMP, base, offset); + if (is_volatile) { + __ Sync(0); + } + switch (type) { + case Primitive::kPrimInt: + __ Lw(trg, TMP, 0); + break; + + case Primitive::kPrimNot: + __ Lwu(trg, TMP, 0); + break; + + case Primitive::kPrimLong: + __ Ld(trg, TMP, 0); + break; + + default: + LOG(FATAL) << "Unsupported op size " << type; + UNREACHABLE(); + } +} + +// int sun.misc.Unsafe.getInt(Object o, long offset) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) { + CreateIntIntIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) { + GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); +} + +// int sun.misc.Unsafe.getIntVolatile(Object o, long offset) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) { + CreateIntIntIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) { + GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); +} + +// long sun.misc.Unsafe.getLong(Object o, long offset) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) { + CreateIntIntIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) { + GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); +} + +// long sun.misc.Unsafe.getLongVolatile(Object o, long offset) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { + CreateIntIntIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { + GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); +} + +// Object sun.misc.Unsafe.getObject(Object o, long offset) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) { + CreateIntIntIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) { + GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); +} + +// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { + CreateIntIntIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { + GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); +} + +static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) { + LocationSummary* locations = new (arena) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::NoLocation()); // Unused receiver. + locations->SetInAt(1, Location::RequiresRegister()); + locations->SetInAt(2, Location::RequiresRegister()); + locations->SetInAt(3, Location::RequiresRegister()); +} + +static void GenUnsafePut(LocationSummary* locations, + Primitive::Type type, + bool is_volatile, + bool is_ordered, + CodeGeneratorMIPS64* codegen) { + DCHECK((type == Primitive::kPrimInt) || + (type == Primitive::kPrimLong) || + (type == Primitive::kPrimNot)); + Mips64Assembler* assembler = codegen->GetAssembler(); + // Object pointer. + GpuRegister base = locations->InAt(1).AsRegister<GpuRegister>(); + // Long offset. 
+ GpuRegister offset = locations->InAt(2).AsRegister<GpuRegister>(); + GpuRegister value = locations->InAt(3).AsRegister<GpuRegister>(); + + __ Daddu(TMP, base, offset); + if (is_volatile || is_ordered) { + __ Sync(0); + } + switch (type) { + case Primitive::kPrimInt: + case Primitive::kPrimNot: + __ Sw(value, TMP, 0); + break; + + case Primitive::kPrimLong: + __ Sd(value, TMP, 0); + break; + + default: + LOG(FATAL) << "Unsupported op size " << type; + UNREACHABLE(); + } + if (is_volatile) { + __ Sync(0); + } + + if (type == Primitive::kPrimNot) { + codegen->MarkGCCard(base, value); + } +} + +// void sun.misc.Unsafe.putInt(Object o, long offset, int x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); +} + +// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); +} + +// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); +} + +// void sun.misc.Unsafe.putObject(Object o, long offset, Object x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); +} + +// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_); +} + +// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); +} + +// void sun.misc.Unsafe.putLong(Object o, long offset, long x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); +} + +// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); +} + +// void 
sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x) +void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) { + CreateIntIntIntIntToVoid(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) { + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); +} + +// char java.lang.String.charAt(int index) +void IntrinsicLocationsBuilderMIPS64::VisitStringCharAt(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCallOnSlowPath, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RequiresRegister()); + locations->SetOut(Location::SameAsFirstInput()); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringCharAt(HInvoke* invoke) { + LocationSummary* locations = invoke->GetLocations(); + Mips64Assembler* assembler = GetAssembler(); + + // Location of reference to data array + const int32_t value_offset = mirror::String::ValueOffset().Int32Value(); + // Location of count + const int32_t count_offset = mirror::String::CountOffset().Int32Value(); + + GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>(); + GpuRegister idx = locations->InAt(1).AsRegister<GpuRegister>(); + GpuRegister out = locations->Out().AsRegister<GpuRegister>(); + + // TODO: Maybe we can support range check elimination. Overall, + // though, I think it's not worth the cost. + // TODO: For simplicity, the index parameter is requested in a + // register, so different from Quick we will not optimize the + // code for constants (which would save a register). + + SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke); + codegen_->AddSlowPath(slow_path); + + // Load the string size + __ Lw(TMP, obj, count_offset); + codegen_->MaybeRecordImplicitNullCheck(invoke); + // Revert to slow path if idx is too large, or negative + __ Bgeuc(idx, TMP, slow_path->GetEntryLabel()); + + // out = obj[2*idx]. + __ Sll(TMP, idx, 1); // idx * 2 + __ Daddu(TMP, TMP, obj); // Address of char at location idx + __ Lhu(out, TMP, value_offset); // Load char at location idx + + __ Bind(slow_path->GetExitLabel()); +} + +// int java.lang.String.compareTo(String anotherString) +void IntrinsicLocationsBuilderMIPS64::VisitStringCompareTo(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCall, + kIntrinsified); + InvokeRuntimeCallingConvention calling_convention; + locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); + locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); + Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt); + locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>())); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringCompareTo(HInvoke* invoke) { + Mips64Assembler* assembler = GetAssembler(); + LocationSummary* locations = invoke->GetLocations(); + + // Note that the null check must have been done earlier. 
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0))); + + GpuRegister argument = locations->InAt(1).AsRegister<GpuRegister>(); + SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke); + codegen_->AddSlowPath(slow_path); + __ Beqzc(argument, slow_path->GetEntryLabel()); + + __ LoadFromOffset(kLoadDoubleword, + TMP, + TR, + QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, + pStringCompareTo).Int32Value()); + __ Jalr(TMP); + __ Nop(); + __ Bind(slow_path->GetExitLabel()); +} + +static void GenerateStringIndexOf(HInvoke* invoke, + Mips64Assembler* assembler, + CodeGeneratorMIPS64* codegen, + ArenaAllocator* allocator, + bool start_at_zero) { + LocationSummary* locations = invoke->GetLocations(); + GpuRegister tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<GpuRegister>() : TMP; + + // Note that the null check must have been done earlier. + DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0))); + + // Check for code points > 0xFFFF. Either a slow-path check when we + // don't know statically, or directly dispatch if we have a constant. + SlowPathCodeMIPS64* slow_path = nullptr; + if (invoke->InputAt(1)->IsIntConstant()) { + if (!IsUint<16>(invoke->InputAt(1)->AsIntConstant()->GetValue())) { + // Always needs the slow-path. We could directly dispatch to it, + // but this case should be rare, so for simplicity just put the + // full slow-path down and branch unconditionally. + slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke); + codegen->AddSlowPath(slow_path); + __ B(slow_path->GetEntryLabel()); + __ Bind(slow_path->GetExitLabel()); + return; + } + } else { + GpuRegister char_reg = locations->InAt(1).AsRegister<GpuRegister>(); + __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max()); + slow_path = new (allocator) IntrinsicSlowPathMIPS64(invoke); + codegen->AddSlowPath(slow_path); + __ Bltuc(tmp_reg, char_reg, slow_path->GetEntryLabel()); // UTF-16 required + } + + if (start_at_zero) { + DCHECK_EQ(tmp_reg, A2); + // Start-index = 0. + __ Clear(tmp_reg); + } else { + __ Slt(TMP, A2, ZERO); // if fromIndex < 0 + __ Seleqz(A2, A2, TMP); // fromIndex = 0 + } + + __ LoadFromOffset(kLoadDoubleword, + TMP, + TR, + QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pIndexOf).Int32Value()); + __ Jalr(TMP); + __ Nop(); + + if (slow_path != nullptr) { + __ Bind(slow_path->GetExitLabel()); + } +} + +// int java.lang.String.indexOf(int ch) +void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCall, + kIntrinsified); + // We have a hand-crafted assembly stub that follows the runtime + // calling convention. So it's best to align the inputs accordingly. + InvokeRuntimeCallingConvention calling_convention; + locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); + locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); + Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt); + locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>())); + + // Need a temp for slow-path codepoint compare, and need to send start-index=0. 
+ locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2))); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) { + GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); +} + +// int java.lang.String.indexOf(int ch, int fromIndex) +void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCall, + kIntrinsified); + // We have a hand-crafted assembly stub that follows the runtime + // calling convention. So it's best to align the inputs accordingly. + InvokeRuntimeCallingConvention calling_convention; + locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); + locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); + locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); + Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt); + locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>())); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) { + GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); +} + +// java.lang.String.String(byte[] bytes) +void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCall, + kIntrinsified); + InvokeRuntimeCallingConvention calling_convention; + locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); + locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); + locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); + locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3))); + Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt); + locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>())); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromBytes(HInvoke* invoke) { + Mips64Assembler* assembler = GetAssembler(); + LocationSummary* locations = invoke->GetLocations(); + + GpuRegister byte_array = locations->InAt(0).AsRegister<GpuRegister>(); + SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke); + codegen_->AddSlowPath(slow_path); + __ Beqzc(byte_array, slow_path->GetEntryLabel()); + + __ LoadFromOffset(kLoadDoubleword, + TMP, + TR, + QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromBytes).Int32Value()); + codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); + __ Jalr(TMP); + __ Nop(); + __ Bind(slow_path->GetExitLabel()); +} + +// java.lang.String.String(char[] value) +void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCall, + kIntrinsified); + InvokeRuntimeCallingConvention calling_convention; + locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); + locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); + locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); + Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt); + 
locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>())); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromChars(HInvoke* invoke) { + Mips64Assembler* assembler = GetAssembler(); + + __ LoadFromOffset(kLoadDoubleword, + TMP, + TR, + QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromChars).Int32Value()); + codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); + __ Jalr(TMP); + __ Nop(); +} + +// java.lang.String.String(String original) +void IntrinsicLocationsBuilderMIPS64::VisitStringNewStringFromString(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kCall, + kIntrinsified); + InvokeRuntimeCallingConvention calling_convention; + locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); + locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); + locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); + Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt); + locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<GpuRegister>())); +} + +void IntrinsicCodeGeneratorMIPS64::VisitStringNewStringFromString(HInvoke* invoke) { + Mips64Assembler* assembler = GetAssembler(); + LocationSummary* locations = invoke->GetLocations(); + + GpuRegister string_to_copy = locations->InAt(0).AsRegister<GpuRegister>(); + SlowPathCodeMIPS64* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS64(invoke); + codegen_->AddSlowPath(slow_path); + __ Beqzc(string_to_copy, slow_path->GetEntryLabel()); + + __ LoadFromOffset(kLoadDoubleword, + TMP, + TR, + QUICK_ENTRYPOINT_OFFSET(kMips64WordSize, pAllocStringFromString).Int32Value()); + codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); + __ Jalr(TMP); + __ Nop(); + __ Bind(slow_path->GetExitLabel()); +} + // Unimplemented intrinsics. 
 #define UNIMPLEMENTED_INTRINSIC(Name) \
@@ -776,38 +1458,12 @@ void IntrinsicCodeGeneratorMIPS64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSE
 UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
 UNIMPLEMENTED_INTRINSIC(MathRoundFloat)

-UNIMPLEMENTED_INTRINSIC(UnsafeGet)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetLong)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetLongVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetObject)
-UNIMPLEMENTED_INTRINSIC(UnsafeGetObjectVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafePut)
-UNIMPLEMENTED_INTRINSIC(UnsafePutOrdered)
-UNIMPLEMENTED_INTRINSIC(UnsafePutVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafePutObject)
-UNIMPLEMENTED_INTRINSIC(UnsafePutObjectOrdered)
-UNIMPLEMENTED_INTRINSIC(UnsafePutObjectVolatile)
-UNIMPLEMENTED_INTRINSIC(UnsafePutLong)
-UNIMPLEMENTED_INTRINSIC(UnsafePutLongOrdered)
-UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
 UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
 UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
 UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
-UNIMPLEMENTED_INTRINSIC(StringCharAt)
-UNIMPLEMENTED_INTRINSIC(StringCompareTo)
 UNIMPLEMENTED_INTRINSIC(StringEquals)
-UNIMPLEMENTED_INTRINSIC(StringIndexOf)
-UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
-UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)
 UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
-UNIMPLEMENTED_INTRINSIC(LongRotateRight)
-UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
 UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
-UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros)
 UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
 UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index f7a7e420bb..a1feaf77bd 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -756,7 +756,9 @@ void ReferenceTypePropagation::ProcessWorklist() {
   while (!worklist_.empty()) {
     HInstruction* instruction = worklist_.back();
     worklist_.pop_back();
-    if (UpdateNullability(instruction) || UpdateReferenceTypeInfo(instruction)) {
+    bool updated_nullability = UpdateNullability(instruction);
+    bool updated_reference_type = UpdateReferenceTypeInfo(instruction);
+    if (updated_nullability || updated_reference_type) {
       AddDependentInstructionsToWorklist(instruction);
     }
   }
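Finally, the reference_type_propagation.cc change fixes a short-circuit evaluation bug: in the old check "if (UpdateNullability(instruction) || UpdateReferenceTypeInfo(instruction))", the second update never ran once the first one reported a change, so the reference type info could be left stale even though dependents were re-queued. Evaluating both calls into named booleans forces both updates to run before the test. A small standalone C++ illustration of the difference, with hypothetical update functions that are not ART code:

    #include <iostream>

    // Two stand-in "update" steps that both have side effects and report whether
    // anything changed (hypothetical, for illustration only).
    static bool UpdateA(int& state) { state |= 1; return true;  }  // always reports a change
    static bool UpdateB(int& state) { state |= 2; return false; }

    int main() {
      int short_circuited = 0;
      if (UpdateA(short_circuited) || UpdateB(short_circuited)) {
        // UpdateB was never called: its side effect is lost and short_circuited == 1.
      }

      int evaluated_both = 0;
      bool changed_a = UpdateA(evaluated_both);
      bool changed_b = UpdateB(evaluated_both);
      if (changed_a || changed_b) {
        // Both updates ran before the test: evaluated_both == 3.
      }

      std::cout << short_circuited << " vs " << evaluated_both << "\n";  // prints "1 vs 3"
      return 0;
    }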