Use atomic load/store for volatile IGET/IPUT/SGET/SPUT.
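
Volatile IGET/IPUT/SGET/SPUT now go through per-target
LoadBaseDispVolatile()/StoreBaseDispVolatile() hooks, with
SupportsVolatileLoadStore() and RegClassForFieldLoadStore() letting each
backend report what it can handle. On MIPS the volatile variants simply
delegate to the plain load/store, since naturally aligned accesses of
32 bits or less are already atomic, and 64-bit volatile accesses are
reported as unsupported. A rough sketch of how the common field-access
code is expected to use the hooks (illustrative only; names and
surrounding logic are approximate):

  // Sketch only, not the actual common-code generator.
  if (field_info.IsVolatile()) {
    DCHECK(SupportsVolatileLoadStore(load_size));
    load = LoadBaseDispVolatile(reg_obj, field_offset, rl_result.reg, load_size);
    GenMemBarrier(kLoadLoad);  // Acquire-style barrier after the volatile load.
  } else {
    load = LoadBaseDisp(reg_obj, field_offset, rl_result.reg, load_size);
  }
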
Bug: 14112919
Change-Id: I79316f438dd3adea9b2653ffc968af83671ad282
diff --git a/compiler/dex/quick/mips/codegen_mips.h b/compiler/dex/quick/mips/codegen_mips.h
index 20fd4b1..90d5a28 100644
--- a/compiler/dex/quick/mips/codegen_mips.h
+++ b/compiler/dex/quick/mips/codegen_mips.h
@@ -32,6 +32,8 @@
bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE;
LIR* CheckSuspendUsingLoad() OVERRIDE;
RegStorage LoadHelper(ThreadOffset<4> offset);
+ LIR* LoadBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_dest,
+ OpSize size) OVERRIDE;
LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
OpSize size) OVERRIDE;
LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, int scale,
@@ -40,6 +42,8 @@
RegStorage r_dest, OpSize size) OVERRIDE;
LIR* LoadConstantNoClobber(RegStorage r_dest, int value);
LIR* LoadConstantWide(RegStorage r_dest, int64_t value);
+ LIR* StoreBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_src,
+ OpSize size) OVERRIDE;
LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
OpSize size) OVERRIDE;
LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, int scale,
@@ -84,6 +88,11 @@
int GetInsnSize(LIR* lir);
bool IsUnconditionalBranch(LIR* lir);
+ // Check support for volatile load/store of a given size.
+ bool SupportsVolatileLoadStore(OpSize size) OVERRIDE;
+ // Get the register class for load/store of a field.
+ RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) OVERRIDE;
+
// Required for target - Dalvik-level generators.
void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest,
RegLocation rl_src1, RegLocation rl_src2);
diff --git a/compiler/dex/quick/mips/target_mips.cc b/compiler/dex/quick/mips/target_mips.cc
index 8d91aba..570c220 100644
--- a/compiler/dex/quick/mips/target_mips.cc
+++ b/compiler/dex/quick/mips/target_mips.cc
@@ -555,6 +555,18 @@
return (lir->opcode == kMipsB);
}
+bool MipsMir2Lir::SupportsVolatileLoadStore(OpSize size) {
+ // No support for 64-bit atomic load/store on mips.
+ return size != k64 && size != kDouble;
+}
+
+RegisterClass MipsMir2Lir::RegClassForFieldLoadStore(OpSize size, bool is_volatile) {
+ // No support for 64-bit atomic load/store on mips.
+ DCHECK(size != k64 && size != kDouble);
+ // TODO: Verify that both core and fp registers are suitable for smaller sizes.
+ return RegClassBySize(size);
+}
+
MipsMir2Lir::MipsMir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
: Mir2Lir(cu, mir_graph, arena) {
for (int i = 0; i < kMipsLast; i++) {
diff --git a/compiler/dex/quick/mips/utility_mips.cc b/compiler/dex/quick/mips/utility_mips.cc
index 8397411..58fbace 100644
--- a/compiler/dex/quick/mips/utility_mips.cc
+++ b/compiler/dex/quick/mips/utility_mips.cc
@@ -545,6 +545,12 @@
return load;
}
+LIR* MipsMir2Lir::LoadBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_dest,
+ OpSize size) {
+ DCHECK(size != k64 && size != kDouble);
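+ // Naturally aligned loads of 32 bits or less are atomic on MIPS, so a plain
+ // load suffices; any acquire barrier is expected to be emitted by the caller.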
+ return LoadBaseDisp(r_base, displacement, r_dest, size);
+}
+
LIR* MipsMir2Lir::LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
OpSize size) {
// TODO: base this on target.
@@ -640,6 +646,12 @@
return res;
}
+LIR* MipsMir2Lir::StoreBaseDispVolatile(RegStorage r_base, int displacement, RegStorage r_src,
+ OpSize size) {
+ DCHECK(size != k64 && size != kDouble);
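+ // Naturally aligned stores of 32 bits or less are atomic on MIPS, so a plain
+ // store suffices; any ordering barriers are expected to be emitted by the caller.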
+ return StoreBaseDisp(r_base, displacement, r_src, size);
+}
+
LIR* MipsMir2Lir::StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
OpSize size) {
// TODO: base this on target.