diff options
author | 2014-05-06 20:49:36 +0100 | |
---|---|---|
committer | 2014-05-07 10:10:19 +0100 | |
commit | 455759b5702b9435b91d1b4dada22c4cce7cae3c (patch) | |
tree | 73df437e7b8e1ca1b78be8d7fb2d38fec01b9dee /compiler/dex/quick/gen_common.cc | |
parent | 7189fee4268c70d7ed0151e988ff7c7cd85f2a30 (diff) |
Remove LoadBaseDispWide and StoreBaseDispWide.
Just pass k64 or kDouble to non-wide versions.
Change-Id: I000619c3b78d3a71db42edc747c8a0ba1ee229be
Diffstat (limited to 'compiler/dex/quick/gen_common.cc')
-rw-r--r-- | compiler/dex/quick/gen_common.cc | 26 |
1 file changed, 8 insertions, 18 deletions
diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc index 2cd17ccffc..395cff7d61 100644 --- a/compiler/dex/quick/gen_common.cc +++ b/compiler/dex/quick/gen_common.cc @@ -564,13 +564,8 @@ void Mir2Lir::GenSput(MIR* mir, RegLocation rl_src, bool is_long_or_double, // There might have been a store before this volatile one so insert StoreStore barrier. GenMemBarrier(kStoreStore); } - if (is_long_or_double) { - StoreBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg); - } else if (rl_src.ref) { - StoreRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg); - } else { - Store32Disp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg); - } + OpSize size = LoadStoreOpSize(is_long_or_double, rl_src.ref); + StoreBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg, size); if (field_info.IsVolatile()) { // A load might follow the volatile store so insert a StoreLoad barrier. GenMemBarrier(kStoreLoad); @@ -646,13 +641,8 @@ void Mir2Lir::GenSget(MIR* mir, RegLocation rl_dest, } RegLocation rl_result = EvalLoc(rl_dest, result_reg_kind, true); - if (is_long_or_double) { - LoadBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, INVALID_SREG); - } else if (rl_result.ref) { - LoadRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg); - } else { - Load32Disp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg); - } + OpSize size = LoadStoreOpSize(is_long_or_double, rl_result.ref); + LoadBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, size, INVALID_SREG); FreeTemp(r_base); if (field_info.IsVolatile()) { @@ -714,8 +704,8 @@ void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size, result_reg_kind = kFPReg; } rl_result = EvalLoc(rl_dest, result_reg_kind, true); - LoadBaseDispWide(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg, - rl_obj.s_reg_low); + LoadBaseDisp(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg, + size, rl_obj.s_reg_low); MarkPossibleNullPointerException(opt_flags); if (field_info.IsVolatile()) { // Without context sensitive analysis, we must issue the most conservative barriers. @@ -727,7 +717,7 @@ void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size, RegStorage reg_ptr = AllocTemp(); OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg, field_info.FieldOffset().Int32Value()); rl_result = EvalLoc(rl_dest, reg_class, true); - LoadBaseDispWide(reg_ptr, 0, rl_result.reg, INVALID_SREG); + LoadBaseDisp(reg_ptr, 0, rl_result.reg, size, INVALID_SREG); MarkPossibleNullPointerException(opt_flags); if (field_info.IsVolatile()) { // Without context sensitive analysis, we must issue the most conservative barriers. @@ -791,7 +781,7 @@ void Mir2Lir::GenIPut(MIR* mir, int opt_flags, OpSize size, // There might have been a store before this volatile one so insert StoreStore barrier. GenMemBarrier(kStoreStore); } - StoreBaseDispWide(reg_ptr, 0, rl_src.reg); + StoreBaseDisp(reg_ptr, 0, rl_src.reg, size); MarkPossibleNullPointerException(opt_flags); if (field_info.IsVolatile()) { // A load might follow the volatile store so insert a StoreLoad barrier. |