ART: Introduce compiler data type.
Replace most uses of the runtime's Primitive in the compiler
with a new class DataType. This prepares for introducing
new types, such as Uint8, that the runtime does not need
to know about.
Test: m test-art-host-gtest
Test: testrunner.py --host
Bug: 23964345
Change-Id: Iec2ad82454eec678fffcd8279a9746b90feb9b0c
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 96efe7f..75a1ce7 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -76,16 +76,16 @@
#define __ codegen->GetVIXLAssembler()->
static void MoveFromReturnRegister(Location trg,
- Primitive::Type type,
+ DataType::Type type,
CodeGeneratorARM64* codegen) {
if (!trg.IsValid()) {
- DCHECK(type == Primitive::kPrimVoid);
+ DCHECK(type == DataType::Type::kVoid);
return;
}
- DCHECK_NE(type, Primitive::kPrimVoid);
+ DCHECK_NE(type, DataType::Type::kVoid);
- if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
+ if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
Register trg_reg = RegisterFrom(trg, type);
Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
__ Mov(trg_reg, res_reg, kDiscardForSameWReg);
@@ -173,7 +173,7 @@
DCHECK(instruction_->GetLocations()->Intrinsified());
DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);
- const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
+ const int32_t element_size = DataType::Size(DataType::Type::kReference);
Register src_curr_addr = XRegisterFrom(locations->GetTemp(0));
Register dst_curr_addr = XRegisterFrom(locations->GetTemp(1));
@@ -303,18 +303,18 @@
}
static void GenReverseBytes(LocationSummary* locations,
- Primitive::Type type,
+ DataType::Type type,
MacroAssembler* masm) {
Location in = locations->InAt(0);
Location out = locations->Out();
switch (type) {
- case Primitive::kPrimShort:
+ case DataType::Type::kInt16:
__ Rev16(WRegisterFrom(out), WRegisterFrom(in));
__ Sxth(WRegisterFrom(out), WRegisterFrom(out));
break;
- case Primitive::kPrimInt:
- case Primitive::kPrimLong:
+ case DataType::Type::kInt32:
+ case DataType::Type::kInt64:
__ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
break;
default:
@@ -328,7 +328,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
- GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
+ GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
@@ -336,7 +336,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
- GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
+ GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
@@ -344,7 +344,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
- GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
+ GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetVIXLAssembler());
}
static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -357,9 +357,9 @@
}
static void GenNumberOfLeadingZeros(LocationSummary* locations,
- Primitive::Type type,
+ DataType::Type type,
MacroAssembler* masm) {
- DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+ DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Location in = locations->InAt(0);
Location out = locations->Out();
@@ -372,7 +372,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
- GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
+ GenNumberOfLeadingZeros(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
@@ -380,13 +380,13 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
- GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
+ GenNumberOfLeadingZeros(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler());
}
static void GenNumberOfTrailingZeros(LocationSummary* locations,
- Primitive::Type type,
+ DataType::Type type,
MacroAssembler* masm) {
- DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+ DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Location in = locations->InAt(0);
Location out = locations->Out();
@@ -400,7 +400,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
- GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
+ GenNumberOfTrailingZeros(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
@@ -408,13 +408,13 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
- GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
+ GenNumberOfTrailingZeros(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler());
}
static void GenReverse(LocationSummary* locations,
- Primitive::Type type,
+ DataType::Type type,
MacroAssembler* masm) {
- DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+ DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Location in = locations->InAt(0);
Location out = locations->Out();
@@ -427,7 +427,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
- GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
+ GenReverse(invoke->GetLocations(), DataType::Type::kInt32, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
@@ -435,19 +435,19 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
- GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
+ GenReverse(invoke->GetLocations(), DataType::Type::kInt64, GetVIXLAssembler());
}
-static void GenBitCount(HInvoke* instr, Primitive::Type type, MacroAssembler* masm) {
- DCHECK(Primitive::IsIntOrLongType(type)) << type;
- DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
- DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);
+static void GenBitCount(HInvoke* instr, DataType::Type type, MacroAssembler* masm) {
+ DCHECK(DataType::IsIntOrLongType(type)) << type;
+ DCHECK_EQ(instr->GetType(), DataType::Type::kInt32);
+ DCHECK_EQ(DataType::Kind(instr->InputAt(0)->GetType()), type);
UseScratchRegisterScope temps(masm);
Register src = InputRegisterAt(instr, 0);
Register dst = RegisterFrom(instr->GetLocations()->Out(), type);
- FPRegister fpr = (type == Primitive::kPrimLong) ? temps.AcquireD() : temps.AcquireS();
+ FPRegister fpr = (type == DataType::Type::kInt64) ? temps.AcquireD() : temps.AcquireS();
__ Fmov(fpr, src);
__ Cnt(fpr.V8B(), fpr.V8B());
@@ -460,7 +460,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongBitCount(HInvoke* invoke) {
- GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler());
+ GenBitCount(invoke, DataType::Type::kInt64, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitIntegerBitCount(HInvoke* invoke) {
@@ -468,19 +468,19 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerBitCount(HInvoke* invoke) {
- GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler());
+ GenBitCount(invoke, DataType::Type::kInt32, GetVIXLAssembler());
}
-static void GenHighestOneBit(HInvoke* invoke, Primitive::Type type, MacroAssembler* masm) {
- DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+static void GenHighestOneBit(HInvoke* invoke, DataType::Type type, MacroAssembler* masm) {
+ DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
UseScratchRegisterScope temps(masm);
Register src = InputRegisterAt(invoke, 0);
Register dst = RegisterFrom(invoke->GetLocations()->Out(), type);
- Register temp = (type == Primitive::kPrimLong) ? temps.AcquireX() : temps.AcquireW();
- size_t high_bit = (type == Primitive::kPrimLong) ? 63u : 31u;
- size_t clz_high_bit = (type == Primitive::kPrimLong) ? 6u : 5u;
+ Register temp = (type == DataType::Type::kInt64) ? temps.AcquireX() : temps.AcquireW();
+ size_t high_bit = (type == DataType::Type::kInt64) ? 63u : 31u;
+ size_t clz_high_bit = (type == DataType::Type::kInt64) ? 6u : 5u;
__ Clz(temp, src);
__ Mov(dst, UINT64_C(1) << high_bit); // MOV (bitmask immediate)
@@ -493,7 +493,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerHighestOneBit(HInvoke* invoke) {
- GenHighestOneBit(invoke, Primitive::kPrimInt, GetVIXLAssembler());
+ GenHighestOneBit(invoke, DataType::Type::kInt32, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitLongHighestOneBit(HInvoke* invoke) {
@@ -501,17 +501,17 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongHighestOneBit(HInvoke* invoke) {
- GenHighestOneBit(invoke, Primitive::kPrimLong, GetVIXLAssembler());
+ GenHighestOneBit(invoke, DataType::Type::kInt64, GetVIXLAssembler());
}
-static void GenLowestOneBit(HInvoke* invoke, Primitive::Type type, MacroAssembler* masm) {
- DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+static void GenLowestOneBit(HInvoke* invoke, DataType::Type type, MacroAssembler* masm) {
+ DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
UseScratchRegisterScope temps(masm);
Register src = InputRegisterAt(invoke, 0);
Register dst = RegisterFrom(invoke->GetLocations()->Out(), type);
- Register temp = (type == Primitive::kPrimLong) ? temps.AcquireX() : temps.AcquireW();
+ Register temp = (type == DataType::Type::kInt64) ? temps.AcquireX() : temps.AcquireW();
__ Neg(temp, src);
__ And(dst, temp, src);
@@ -522,7 +522,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitIntegerLowestOneBit(HInvoke* invoke) {
- GenLowestOneBit(invoke, Primitive::kPrimInt, GetVIXLAssembler());
+ GenLowestOneBit(invoke, DataType::Type::kInt32, GetVIXLAssembler());
}
void IntrinsicLocationsBuilderARM64::VisitLongLowestOneBit(HInvoke* invoke) {
@@ -530,7 +530,7 @@
}
void IntrinsicCodeGeneratorARM64::VisitLongLowestOneBit(HInvoke* invoke) {
- GenLowestOneBit(invoke, Primitive::kPrimLong, GetVIXLAssembler());
+ GenLowestOneBit(invoke, DataType::Type::kInt64, GetVIXLAssembler());
}
static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -902,18 +902,18 @@
}
void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
- codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
+ codegen_->Load(DataType::Type::kReference, WRegisterFrom(invoke->GetLocations()->Out()),
MemOperand(tr, Thread::PeerOffset<kArm64PointerSize>().Int32Value()));
}
static void GenUnsafeGet(HInvoke* invoke,
- Primitive::Type type,
+ DataType::Type type,
bool is_volatile,
CodeGeneratorARM64* codegen) {
LocationSummary* locations = invoke->GetLocations();
- DCHECK((type == Primitive::kPrimInt) ||
- (type == Primitive::kPrimLong) ||
- (type == Primitive::kPrimNot));
+ DCHECK((type == DataType::Type::kInt32) ||
+ (type == DataType::Type::kInt64) ||
+ (type == DataType::Type::kReference));
Location base_loc = locations->InAt(1);
Register base = WRegisterFrom(base_loc); // Object pointer.
Location offset_loc = locations->InAt(2);
@@ -921,7 +921,7 @@
Location trg_loc = locations->Out();
Register trg = RegisterFrom(trg_loc, type);
- if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
+ if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
// UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
Register temp = WRegisterFrom(locations->GetTemp(0));
codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
@@ -942,7 +942,7 @@
codegen->Load(type, trg, mem_op);
}
- if (type == Primitive::kPrimNot) {
+ if (type == DataType::Type::kReference) {
DCHECK(trg.IsW());
codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0u, offset_loc);
}
@@ -991,22 +991,22 @@
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
- GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
+ GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
- GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
+ GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
- GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
+ GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
- GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
+ GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
- GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
+ GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
- GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
+ GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}
static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
@@ -1048,7 +1048,7 @@
}
static void GenUnsafePut(HInvoke* invoke,
- Primitive::Type type,
+ DataType::Type type,
bool is_volatile,
bool is_ordered,
CodeGeneratorARM64* codegen) {
@@ -1066,7 +1066,7 @@
// freeing the temporary registers so they can be used in `MarkGCCard`.
UseScratchRegisterScope temps(masm);
- if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ if (kPoisonHeapReferences && type == DataType::Type::kReference) {
DCHECK(value.IsW());
Register temp = temps.AcquireW();
__ Mov(temp.W(), value.W());
@@ -1081,7 +1081,7 @@
}
}
- if (type == Primitive::kPrimNot) {
+ if (type == DataType::Type::kReference) {
bool value_can_be_null = true; // TODO: Worth finding out this information?
codegen->MarkGCCard(base, value, value_can_be_null);
}
@@ -1089,63 +1089,63 @@
void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimInt,
+ DataType::Type::kInt32,
/* is_volatile */ false,
/* is_ordered */ false,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimInt,
+ DataType::Type::kInt32,
/* is_volatile */ false,
/* is_ordered */ true,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimInt,
+ DataType::Type::kInt32,
/* is_volatile */ true,
/* is_ordered */ false,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimNot,
+ DataType::Type::kReference,
/* is_volatile */ false,
/* is_ordered */ false,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimNot,
+ DataType::Type::kReference,
/* is_volatile */ false,
/* is_ordered */ true,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimNot,
+ DataType::Type::kReference,
/* is_volatile */ true,
/* is_ordered */ false,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimLong,
+ DataType::Type::kInt64,
/* is_volatile */ false,
/* is_ordered */ false,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimLong,
+ DataType::Type::kInt64,
/* is_volatile */ false,
/* is_ordered */ true,
codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
GenUnsafePut(invoke,
- Primitive::kPrimLong,
+ DataType::Type::kInt64,
/* is_volatile */ true,
/* is_ordered */ false,
codegen_);
@@ -1153,7 +1153,7 @@
static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
HInvoke* invoke,
- Primitive::Type type) {
+ DataType::Type type) {
bool can_call = kEmitCompilerReadBarrier &&
kUseBakerReadBarrier &&
(invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
@@ -1172,17 +1172,17 @@
// operations to potentially clobber the output. Likewise when
// emitting a (Baker) read barrier, which may call.
Location::OutputOverlap overlaps =
- ((kPoisonHeapReferences && type == Primitive::kPrimNot) || can_call)
+ ((kPoisonHeapReferences && type == DataType::Type::kReference) || can_call)
? Location::kOutputOverlap
: Location::kNoOutputOverlap;
locations->SetOut(Location::RequiresRegister(), overlaps);
- if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
+ if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
// Temporary register for (Baker) read barrier.
locations->AddTemp(Location::RequiresRegister());
}
}
-static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorARM64* codegen) {
+static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorARM64* codegen) {
MacroAssembler* masm = codegen->GetVIXLAssembler();
LocationSummary* locations = invoke->GetLocations();
@@ -1196,7 +1196,7 @@
Register value = RegisterFrom(locations->InAt(4), type); // Value.
// This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
- if (type == Primitive::kPrimNot) {
+ if (type == DataType::Type::kReference) {
// Mark card for object assuming new value is stored.
bool value_can_be_null = true; // TODO: Worth finding out this information?
codegen->MarkGCCard(base, value, value_can_be_null);
@@ -1228,7 +1228,7 @@
__ Add(tmp_ptr, base.X(), Operand(offset));
- if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ if (kPoisonHeapReferences && type == DataType::Type::kReference) {
codegen->GetAssembler()->PoisonHeapReference(expected);
if (value.Is(expected)) {
// Do not poison `value`, as it is the same register as
@@ -1253,7 +1253,7 @@
__ Bind(&exit_loop);
__ Cset(out, eq);
- if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ if (kPoisonHeapReferences && type == DataType::Type::kReference) {
codegen->GetAssembler()->UnpoisonHeapReference(expected);
if (value.Is(expected)) {
// Do not unpoison `value`, as it is the same register as
@@ -1265,10 +1265,10 @@
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
- CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
+ CreateIntIntIntIntIntToInt(arena_, invoke, DataType::Type::kInt32);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
- CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
+ CreateIntIntIntIntIntToInt(arena_, invoke, DataType::Type::kInt64);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
// The only read barrier implementation supporting the
@@ -1277,21 +1277,21 @@
return;
}
- CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
+ CreateIntIntIntIntIntToInt(arena_, invoke, DataType::Type::kReference);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
- GenCas(invoke, Primitive::kPrimInt, codegen_);
+ GenCas(invoke, DataType::Type::kInt32, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
- GenCas(invoke, Primitive::kPrimLong, codegen_);
+ GenCas(invoke, DataType::Type::kInt64, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
// The only read barrier implementation supporting the
// UnsafeCASObject intrinsic is the Baker-style read barriers.
DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
- GenCas(invoke, Primitive::kPrimNot, codegen_);
+ GenCas(invoke, DataType::Type::kReference, codegen_);
}
void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
@@ -1397,7 +1397,7 @@
DCHECK_ALIGNED(value_offset, 8);
static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");
- const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
+ const size_t char_size = DataType::Size(DataType::Type::kUint16);
DCHECK_EQ(char_size, 2u);
// Promote temp2 to an X reg, ready for LDR.
@@ -1457,7 +1457,7 @@
__ Bind(&different_compression);
// Comparison for different compression style.
- const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
+ const size_t c_char_size = DataType::Size(DataType::Type::kInt8);
DCHECK_EQ(c_char_size, 1u);
temp1 = temp1.W();
temp2 = temp2.W();
@@ -1731,7 +1731,7 @@
__ Bind(slow_path->GetExitLabel());
return;
}
- } else if (code_point->GetType() != Primitive::kPrimChar) {
+ } else if (code_point->GetType() != DataType::Type::kUint16) {
Register char_reg = WRegisterFrom(locations->InAt(1));
__ Tst(char_reg, 0xFFFF0000);
slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
@@ -1762,7 +1762,7 @@
InvokeRuntimeCallingConvention calling_convention;
locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
- locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
+ locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kInt32));
// Need to send start_index=0.
locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
@@ -1783,7 +1783,7 @@
locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
- locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
+ locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kInt32));
}
void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
@@ -1800,7 +1800,7 @@
locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
- locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
+ locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
}
void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
@@ -1826,7 +1826,7 @@
locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
- locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
+ locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
}
void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
@@ -1846,7 +1846,7 @@
kIntrinsified);
InvokeRuntimeCallingConvention calling_convention;
locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
- locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
+ locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
}
void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
@@ -1866,8 +1866,8 @@
static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
- DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
- DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));
+ DCHECK(DataType::IsFloatingPointType(invoke->InputAt(0)->GetType()));
+ DCHECK(DataType::IsFloatingPointType(invoke->GetType()));
LocationSummary* const locations = new (arena) LocationSummary(invoke,
LocationSummary::kCallOnMainOnly,
@@ -1880,9 +1880,9 @@
static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
- DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
- DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
- DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));
+ DCHECK(DataType::IsFloatingPointType(invoke->InputAt(0)->GetType()));
+ DCHECK(DataType::IsFloatingPointType(invoke->InputAt(1)->GetType()));
+ DCHECK(DataType::IsFloatingPointType(invoke->GetType()));
LocationSummary* const locations = new (arena) LocationSummary(invoke,
LocationSummary::kCallOnMainOnly,
@@ -2056,7 +2056,7 @@
LocationSummary* locations = invoke->GetLocations();
// Check assumption that sizeof(Char) is 2 (used in scaling below).
- const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
+ const size_t char_size = DataType::Size(DataType::Type::kUint16);
DCHECK_EQ(char_size, 2u);
// Location of data in char array buffer.
@@ -2135,7 +2135,7 @@
__ B(&done);
if (mirror::kUseStringCompression) {
- const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
+ const size_t c_char_size = DataType::Size(DataType::Type::kInt8);
DCHECK_EQ(c_char_size, 1u);
__ Bind(&compressed_string_preloop);
__ Add(src_ptr, src_ptr, Operand(srcBegin));
@@ -2219,7 +2219,7 @@
if (!length_is_input_length) {
// Check that length(input) >= length.
__ Ldr(temp, MemOperand(input, length_offset));
- __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
+ __ Cmp(temp, OperandFrom(length, DataType::Type::kInt32));
__ B(slow_path->GetEntryLabel(), lt);
}
} else {
@@ -2229,7 +2229,7 @@
__ B(slow_path->GetEntryLabel(), lt);
// Check that (length(input) - pos) >= length.
- __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
+ __ Cmp(temp, OperandFrom(length, DataType::Type::kInt32));
__ B(slow_path->GetEntryLabel(), lt);
}
} else if (length_is_input_length) {
@@ -2244,7 +2244,7 @@
__ Ldr(temp, MemOperand(input, length_offset));
__ Subs(temp, temp, pos_reg);
// Ccmp if length(input) >= pos, else definitely bail to slow path (N!=V == lt).
- __ Ccmp(temp, OperandFrom(length, Primitive::kPrimInt), NFlag, ge);
+ __ Ccmp(temp, OperandFrom(length, DataType::Type::kInt32), NFlag, ge);
__ B(slow_path->GetEntryLabel(), lt);
}
}
@@ -2253,7 +2253,7 @@
// source address for System.arraycopy* intrinsics in `src_base`,
// `dst_base` and `src_end` respectively.
static void GenSystemArrayCopyAddresses(MacroAssembler* masm,
- Primitive::Type type,
+ DataType::Type type,
const Register& src,
const Location& src_pos,
const Register& dst,
@@ -2263,10 +2263,10 @@
const Register& dst_base,
const Register& src_end) {
// This routine is used by the SystemArrayCopy and the SystemArrayCopyChar intrinsics.
- DCHECK(type == Primitive::kPrimNot || type == Primitive::kPrimChar)
+ DCHECK(type == DataType::Type::kReference || type == DataType::Type::kUint16)
<< "Unexpected element type: " << type;
- const int32_t element_size = Primitive::ComponentSize(type);
- const int32_t element_size_shift = Primitive::ComponentSizeShift(type);
+ const int32_t element_size = DataType::Size(type);
+ const int32_t element_size_shift = DataType::SizeShift(type);
const uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();
if (src_pos.IsConstant()) {
@@ -2353,7 +2353,7 @@
src_stop_addr = src_stop_addr.X();
GenSystemArrayCopyAddresses(masm,
- Primitive::kPrimChar,
+ DataType::Type::kUint16,
src,
src_pos,
dst,
@@ -2364,7 +2364,7 @@
src_stop_addr);
// Iterate over the arrays and do a raw copy of the chars.
- const int32_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
+ const int32_t char_size = DataType::Size(DataType::Type::kUint16);
UseScratchRegisterScope temps(masm);
Register tmp = temps.AcquireW();
vixl::aarch64::Label loop, done;
@@ -2781,8 +2781,8 @@
Register dst_curr_addr = temp2.X();
Register src_stop_addr = temp3.X();
vixl::aarch64::Label done;
- const Primitive::Type type = Primitive::kPrimNot;
- const int32_t element_size = Primitive::ComponentSize(type);
+ const DataType::Type type = DataType::Type::kReference;
+ const int32_t element_size = DataType::Size(type);
if (length.IsRegister()) {
// Don't enter the copy loop if the length is null.
@@ -2957,7 +2957,7 @@
IntrinsicVisitor::ComputeIntegerValueOfLocations(
invoke,
codegen_,
- calling_convention.GetReturnLocation(Primitive::kPrimNot),
+ calling_convention.GetReturnLocation(DataType::Type::kReference),
Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
}
@@ -2966,7 +2966,7 @@
LocationSummary* locations = invoke->GetLocations();
MacroAssembler* masm = GetVIXLAssembler();
- Register out = RegisterFrom(locations->Out(), Primitive::kPrimNot);
+ Register out = RegisterFrom(locations->Out(), DataType::Type::kReference);
UseScratchRegisterScope temps(masm);
Register temp = temps.AcquireW();
InvokeRuntimeCallingConvention calling_convention;
@@ -2996,7 +2996,7 @@
codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
} else {
- Register in = RegisterFrom(locations->InAt(0), Primitive::kPrimInt);
+ Register in = RegisterFrom(locations->InAt(0), DataType::Type::kInt32);
// Check bounds of our cache.
__ Add(out.W(), in.W(), -info.low);
__ Cmp(out.W(), info.high - info.low + 1);
@@ -3007,8 +3007,8 @@
uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
__ Ldr(temp.W(), codegen_->DeduplicateBootImageAddressLiteral(data_offset + address));
MemOperand source = HeapOperand(
- temp, out.X(), LSL, Primitive::ComponentSizeShift(Primitive::kPrimNot));
- codegen_->Load(Primitive::kPrimNot, out, source);
+ temp, out.X(), LSL, DataType::SizeShift(DataType::Type::kReference));
+ codegen_->Load(DataType::Type::kReference, out, source);
codegen_->GetAssembler()->MaybeUnpoisonHeapReference(out);
__ B(&done);
__ Bind(&allocate);
@@ -3034,7 +3034,7 @@
void IntrinsicCodeGeneratorARM64::VisitThreadInterrupted(HInvoke* invoke) {
MacroAssembler* masm = GetVIXLAssembler();
- Register out = RegisterFrom(invoke->GetLocations()->Out(), Primitive::kPrimInt);
+ Register out = RegisterFrom(invoke->GetLocations()->Out(), DataType::Type::kInt32);
UseScratchRegisterScope temps(masm);
Register temp = temps.AcquireX();