Diffstat (limited to 'compiler/dex/quick/gen_common.cc')
-rw-r--r-- | compiler/dex/quick/gen_common.cc | 26
1 file changed, 8 insertions, 18 deletions
diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc
index 2cd17ccffc..395cff7d61 100644
--- a/compiler/dex/quick/gen_common.cc
+++ b/compiler/dex/quick/gen_common.cc
@@ -564,13 +564,8 @@ void Mir2Lir::GenSput(MIR* mir, RegLocation rl_src, bool is_long_or_double,
       // There might have been a store before this volatile one so insert StoreStore barrier.
       GenMemBarrier(kStoreStore);
     }
-    if (is_long_or_double) {
-      StoreBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
-    } else if (rl_src.ref) {
-      StoreRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
-    } else {
-      Store32Disp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
-    }
+    OpSize size = LoadStoreOpSize(is_long_or_double, rl_src.ref);
+    StoreBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg, size);
     if (field_info.IsVolatile()) {
       // A load might follow the volatile store so insert a StoreLoad barrier.
       GenMemBarrier(kStoreLoad);
@@ -646,13 +641,8 @@ void Mir2Lir::GenSget(MIR* mir, RegLocation rl_dest,
     }
     RegLocation rl_result = EvalLoc(rl_dest, result_reg_kind, true);
 
-    if (is_long_or_double) {
-      LoadBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, INVALID_SREG);
-    } else if (rl_result.ref) {
-      LoadRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg);
-    } else {
-      Load32Disp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg);
-    }
+    OpSize size = LoadStoreOpSize(is_long_or_double, rl_result.ref);
+    LoadBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, size, INVALID_SREG);
     FreeTemp(r_base);
 
     if (field_info.IsVolatile()) {
@@ -714,8 +704,8 @@ void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size,
         result_reg_kind = kFPReg;
       }
       rl_result = EvalLoc(rl_dest, result_reg_kind, true);
-      LoadBaseDispWide(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg,
-                       rl_obj.s_reg_low);
+      LoadBaseDisp(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg,
+                   size, rl_obj.s_reg_low);
       MarkPossibleNullPointerException(opt_flags);
       if (field_info.IsVolatile()) {
         // Without context sensitive analysis, we must issue the most conservative barriers.
@@ -727,7 +717,7 @@ void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size,
       RegStorage reg_ptr = AllocTemp();
       OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg, field_info.FieldOffset().Int32Value());
       rl_result = EvalLoc(rl_dest, reg_class, true);
-      LoadBaseDispWide(reg_ptr, 0, rl_result.reg, INVALID_SREG);
+      LoadBaseDisp(reg_ptr, 0, rl_result.reg, size, INVALID_SREG);
      MarkPossibleNullPointerException(opt_flags);
      if (field_info.IsVolatile()) {
        // Without context sensitive analysis, we must issue the most conservative barriers.
@@ -791,7 +781,7 @@ void Mir2Lir::GenIPut(MIR* mir, int opt_flags, OpSize size,
       // There might have been a store before this volatile one so insert StoreStore barrier.
       GenMemBarrier(kStoreStore);
     }
-    StoreBaseDispWide(reg_ptr, 0, rl_src.reg);
+    StoreBaseDisp(reg_ptr, 0, rl_src.reg, size);
     MarkPossibleNullPointerException(opt_flags);
     if (field_info.IsVolatile()) {
       // A load might follow the volatile store so insert a StoreLoad barrier.
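
The change above replaces each three-way branch on is_long_or_double / ref with a single size computation followed by one StoreBaseDisp or LoadBaseDisp call. A minimal sketch of the LoadStoreOpSize helper the new code relies on is shown below; the helper's name and argument order come from the calls in the diff, but the OpSize enumerator names (k64, kReference, k32) are assumptions for illustration, not taken from this commit:

    // Sketch only: map the wide/reference flags onto a single OpSize value so
    // one LoadBaseDisp/StoreBaseDisp call can replace the old
    // LoadBaseDispWide / LoadRefDisp / Load32Disp (and store) variants.
    // The enumerator names below are assumed, not confirmed by this diff.
    OpSize LoadStoreOpSize(bool wide, bool ref) {
      return wide ? k64 : (ref ? kReference : k32);
    }

With the size decided in one place, callers such as GenSput and GenSget pass it straight through to StoreBaseDisp/LoadBaseDisp, and the per-width dispatch moves out of every field-access site and into the load/store helpers themselves.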