Compile-time tuning: assembly phase

Not as much compile-time gain from reworking the assembly phase as I'd
hoped, but still worthwhile.  Should see ~2% improvement thanks to
the assembly rework.  On the other hand, expect some huge gains for some
applications thanks to better detection of large machine-generated init
methods.  Thinkfree shows a 25% improvement.

The major assembly change was to thread the LIR nodes that
require fixup into a fixup chain.  Only those are processed during the
final assembly pass(es).  This doesn't help for methods which only
require a single pass to assemble, but does speed up the larger methods
which require multiple assembly passes.

Also replaced the block_map_ basic block lookup table (which contained
space for a BasicBlock* for each dex instruction unit) with a block id
map - cutting its space requirements by half in a 32-bit pointer
environment.

Changes:
  o Reduce size of LIR struct by 12.5% (one of the big memory users)
  o Repurpose the use/def portion of the LIR after optimization complete.
  o Encode instruction bits to LIR
  o Thread LIR nodes requiring pc fixup
  o Change follow-on assembly passes to only consider fixup LIRs
  o Switch on pc-rel fixup kind
  o Fast-path for small methods - single pass assembly
  o Avoid using cb[n]z for null checks (almost always exceed displacement)
  o Improve detection of large initialization methods.
  o Rework def/use flag setup.
  o Remove a sequential search from FindBlock using lookup table of 16-bit
    block ids rather than full block pointers.
  o Eliminate pcRelFixup and use fixup kind instead.
  o Add check for 16-bit overflow on dex offset.

Change-Id: I4c6615f83fed46f84629ad6cfe4237205a9562b4
diff --git a/compiler/dex/quick/arm/arm_lir.h b/compiler/dex/quick/arm/arm_lir.h
index 2f54190..d184673 100644
--- a/compiler/dex/quick/arm/arm_lir.h
+++ b/compiler/dex/quick/arm/arm_lir.h
@@ -462,7 +462,7 @@
 
 // Instruction assembly field_loc kind.
 enum ArmEncodingKind {
-  kFmtUnused,
+  kFmtUnused,    // Unused field and marks end of formats.
   kFmtBitBlt,    // Bit string using end/start.
   kFmtDfp,       // Double FP reg.
   kFmtSfp,       // Single FP reg.
@@ -477,6 +477,7 @@
   kFmtBrOffset,  // Signed extended [26,11,13,21-16,10-0]:0.
   kFmtFPImm,     // Encoded floating point immediate.
   kFmtOff24,     // 24-bit Thumb2 unconditional branch encoding.
+  kFmtSkip,      // Unused field, but continue to next.
 };
 
 // Struct used to define the snippet positions for each Thumb opcode.
@@ -492,6 +493,7 @@
   const char* name;
   const char* fmt;
   int size;   // Note: size is in bytes.
+  FixupKind fixup;
 };
 
 }  // namespace art
diff --git a/compiler/dex/quick/arm/assemble_arm.cc b/compiler/dex/quick/arm/assemble_arm.cc
index 2d69d93..dac3a21 100644
--- a/compiler/dex/quick/arm/assemble_arm.cc
+++ b/compiler/dex/quick/arm/assemble_arm.cc
@@ -37,9 +37,9 @@
  * fmt: for pretty-printing
  */
 #define ENCODING_MAP(opcode, skeleton, k0, ds, de, k1, s1s, s1e, k2, s2s, s2e, \
-                     k3, k3s, k3e, flags, name, fmt, size) \
+                     k3, k3s, k3e, flags, name, fmt, size, fixup) \
         {skeleton, {{k0, ds, de}, {k1, s1s, s1e}, {k2, s2s, s2e}, \
-                    {k3, k3s, k3e}}, opcode, flags, name, fmt, size}
+                    {k3, k3s, k3e}}, opcode, flags, name, fmt, size, fixup}
 
 /* Instruction dump string format keys: !pf, where "!" is the start
  * of the key, "p" is which numeric operand to use and "f" is the
@@ -79,916 +79,938 @@
 const ArmEncodingMap ArmMir2Lir::EncodingMap[kArmLast] = {
     ENCODING_MAP(kArm16BitData,    0x0000,
                  kFmtBitBlt, 15, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
-                 kFmtUnused, -1, -1, IS_UNARY_OP, "data", "0x!0h(!0d)", 2),
+                 kFmtUnused, -1, -1, IS_UNARY_OP, "data", "0x!0h(!0d)", 2, kFixupNone),
     ENCODING_MAP(kThumbAdcRR,        0x4140,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES | USES_CCODES,
-                 "adcs", "!0C, !1C", 2),
+                 "adcs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbAddRRI3,      0x1c00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "adds", "!0C, !1C, #!2d", 2),
+                 "adds", "!0C, !1C, #!2d", 2, kFixupNone),
     ENCODING_MAP(kThumbAddRI8,       0x3000,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE0 | SETS_CCODES,
-                 "adds", "!0C, !0C, #!1d", 2),
+                 "adds", "!0C, !0C, #!1d", 2, kFixupNone),
     ENCODING_MAP(kThumbAddRRR,       0x1800,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "adds", "!0C, !1C, !2C", 2),
+                 "adds", "!0C, !1C, !2C", 2, kFixupNone),
     ENCODING_MAP(kThumbAddRRLH,     0x4440,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE01,
-                 "add", "!0C, !1C", 2),
+                 "add", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbAddRRHL,     0x4480,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE01,
-                 "add", "!0C, !1C", 2),
+                 "add", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbAddRRHH,     0x44c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE01,
-                 "add", "!0C, !1C", 2),
+                 "add", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbAddPcRel,    0xa000,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | IS_BRANCH | NEEDS_FIXUP,
-                 "add", "!0C, pc, #!1E", 2),
+                 "add", "!0C, pc, #!1E", 2, kFixupLoad),
     ENCODING_MAP(kThumbAddSpRel,    0xa800,
-                 kFmtBitBlt, 10, 8, kFmtUnused, -1, -1, kFmtBitBlt, 7, 0,
+                 kFmtBitBlt, 10, 8, kFmtSkip, -1, -1, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF_SP | REG_USE_SP,
-                 "add", "!0C, sp, #!2E", 2),
+                 "add", "!0C, sp, #!2E", 2, kFixupNone),
     ENCODING_MAP(kThumbAddSpI7,      0xb000,
                  kFmtBitBlt, 6, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP | REG_DEF_SP | REG_USE_SP,
-                 "add", "sp, #!0d*4", 2),
+                 "add", "sp, #!0d*4", 2, kFixupNone),
     ENCODING_MAP(kThumbAndRR,        0x4000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "ands", "!0C, !1C", 2),
+                 "ands", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbAsrRRI5,      0x1000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "asrs", "!0C, !1C, #!2d", 2),
+                 "asrs", "!0C, !1C, #!2d", 2, kFixupNone),
     ENCODING_MAP(kThumbAsrRR,        0x4100,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "asrs", "!0C, !1C", 2),
+                 "asrs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbBCond,        0xd000,
                  kFmtBitBlt, 7, 0, kFmtBitBlt, 11, 8, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | IS_BRANCH | USES_CCODES |
-                 NEEDS_FIXUP, "b!1c", "!0t", 2),
+                 NEEDS_FIXUP, "b!1c", "!0t", 2, kFixupCondBranch),
     ENCODING_MAP(kThumbBUncond,      0xe000,
                  kFmtBitBlt, 10, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH | NEEDS_FIXUP,
-                 "b", "!0t", 2),
+                 "b", "!0t", 2, kFixupT1Branch),
     ENCODING_MAP(kThumbBicRR,        0x4380,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "bics", "!0C, !1C", 2),
+                 "bics", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbBkpt,          0xbe00,
                  kFmtBitBlt, 7, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH,
-                 "bkpt", "!0d", 2),
+                 "bkpt", "!0d", 2, kFixupNone),
     ENCODING_MAP(kThumbBlx1,         0xf000,
                  kFmtBitBlt, 10, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | IS_BRANCH | REG_DEF_LR |
-                 NEEDS_FIXUP, "blx_1", "!0u", 2),
+                 NEEDS_FIXUP, "blx_1", "!0u", 2, kFixupBlx1),
     ENCODING_MAP(kThumbBlx2,         0xe800,
                  kFmtBitBlt, 10, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | IS_BRANCH | REG_DEF_LR |
-                 NEEDS_FIXUP, "blx_2", "!0v", 2),
+                 NEEDS_FIXUP, "blx_2", "!0v", 2, kFixupLabel),
     ENCODING_MAP(kThumbBl1,          0xf000,
                  kFmtBitBlt, 10, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
-                 kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH | REG_DEF_LR,
-                 "bl_1", "!0u", 2),
+                 kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH | REG_DEF_LR | NEEDS_FIXUP,
+                 "bl_1", "!0u", 2, kFixupBl1),
     ENCODING_MAP(kThumbBl2,          0xf800,
                  kFmtBitBlt, 10, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
-                 kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH | REG_DEF_LR,
-                 "bl_2", "!0v", 2),
+                 kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH | REG_DEF_LR | NEEDS_FIXUP,
+                 "bl_2", "!0v", 2, kFixupLabel),
     ENCODING_MAP(kThumbBlxR,         0x4780,
                  kFmtBitBlt, 6, 3, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_USE0 | IS_BRANCH | REG_DEF_LR,
-                 "blx", "!0C", 2),
+                 "blx", "!0C", 2, kFixupNone),
     ENCODING_MAP(kThumbBx,            0x4700,
                  kFmtBitBlt, 6, 3, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH,
-                 "bx", "!0C", 2),
+                 "bx", "!0C", 2, kFixupNone),
     ENCODING_MAP(kThumbCmnRR,        0x42c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01 | SETS_CCODES,
-                 "cmn", "!0C, !1C", 2),
+                 "cmn", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbCmpRI8,       0x2800,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE0 | SETS_CCODES,
-                 "cmp", "!0C, #!1d", 2),
+                 "cmp", "!0C, #!1d", 2, kFixupNone),
     ENCODING_MAP(kThumbCmpRR,        0x4280,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01 | SETS_CCODES,
-                 "cmp", "!0C, !1C", 2),
+                 "cmp", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbCmpLH,        0x4540,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01 | SETS_CCODES,
-                 "cmp", "!0C, !1C", 2),
+                 "cmp", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbCmpHL,        0x4580,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01 | SETS_CCODES,
-                 "cmp", "!0C, !1C", 2),
+                 "cmp", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbCmpHH,        0x45c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01 | SETS_CCODES,
-                 "cmp", "!0C, !1C", 2),
+                 "cmp", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbEorRR,        0x4040,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "eors", "!0C, !1C", 2),
+                 "eors", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbLdmia,         0xc800,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE0 | REG_DEF_LIST1 | IS_LOAD,
-                 "ldmia", "!0C!!, <!1R>", 2),
+                 "ldmia", "!0C!!, <!1R>", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrRRI5,      0x6800,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldr", "!0C, [!1C, #!2E]", 2),
+                 "ldr", "!0C, [!1C, #!2E]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrRRR,       0x5800,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldr", "!0C, [!1C, !2C]", 2),
+                 "ldr", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrPcRel,    0x4800,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0 | REG_USE_PC
-                 | IS_LOAD | NEEDS_FIXUP, "ldr", "!0C, [pc, #!1E]", 2),
+                 | IS_LOAD | NEEDS_FIXUP, "ldr", "!0C, [pc, #!1E]", 2, kFixupLoad),
     ENCODING_MAP(kThumbLdrSpRel,    0x9800,
-                 kFmtBitBlt, 10, 8, kFmtUnused, -1, -1, kFmtBitBlt, 7, 0,
+                 kFmtBitBlt, 10, 8, kFmtSkip, -1, -1, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0 | REG_USE_SP
-                 | IS_LOAD, "ldr", "!0C, [sp, #!2E]", 2),
+                 | IS_LOAD, "ldr", "!0C, [sp, #!2E]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrbRRI5,     0x7800,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrb", "!0C, [!1C, #2d]", 2),
+                 "ldrb", "!0C, [!1C, #2d]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrbRRR,      0x5c00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrb", "!0C, [!1C, !2C]", 2),
+                 "ldrb", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrhRRI5,     0x8800,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrh", "!0C, [!1C, #!2F]", 2),
+                 "ldrh", "!0C, [!1C, #!2F]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrhRRR,      0x5a00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrh", "!0C, [!1C, !2C]", 2),
+                 "ldrh", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrsbRRR,     0x5600,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrsb", "!0C, [!1C, !2C]", 2),
+                 "ldrsb", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbLdrshRRR,     0x5e00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrsh", "!0C, [!1C, !2C]", 2),
+                 "ldrsh", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbLslRRI5,      0x0000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "lsls", "!0C, !1C, #!2d", 2),
+                 "lsls", "!0C, !1C, #!2d", 2, kFixupNone),
     ENCODING_MAP(kThumbLslRR,        0x4080,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "lsls", "!0C, !1C", 2),
+                 "lsls", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbLsrRRI5,      0x0800,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "lsrs", "!0C, !1C, #!2d", 2),
+                 "lsrs", "!0C, !1C, #!2d", 2, kFixupNone),
     ENCODING_MAP(kThumbLsrRR,        0x40c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "lsrs", "!0C, !1C", 2),
+                 "lsrs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbMovImm,       0x2000,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0 | SETS_CCODES,
-                 "movs", "!0C, #!1d", 2),
+                 "movs", "!0C, #!1d", 2, kFixupNone),
     ENCODING_MAP(kThumbMovRR,        0x1c00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "movs", "!0C, !1C", 2),
+                 "movs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbMovRR_H2H,    0x46c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "mov", "!0C, !1C", 2),
+                 "mov", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbMovRR_H2L,    0x4640,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "mov", "!0C, !1C", 2),
+                 "mov", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbMovRR_L2H,    0x4680,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "mov", "!0C, !1C", 2),
+                 "mov", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbMul,           0x4340,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "muls", "!0C, !1C", 2),
+                 "muls", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbMvn,           0x43c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "mvns", "!0C, !1C", 2),
+                 "mvns", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbNeg,           0x4240,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "negs", "!0C, !1C", 2),
+                 "negs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbOrr,           0x4300,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "orrs", "!0C, !1C", 2),
+                 "orrs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbPop,           0xbc00,
                  kFmtBitBlt, 8, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_DEF_LIST0
-                 | IS_LOAD, "pop", "<!0R>", 2),
+                 | IS_LOAD, "pop", "<!0R>", 2, kFixupNone),
     ENCODING_MAP(kThumbPush,          0xb400,
                  kFmtBitBlt, 8, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_USE_LIST0
-                 | IS_STORE, "push", "<!0R>", 2),
+                 | IS_STORE, "push", "<!0R>", 2, kFixupNone),
     ENCODING_MAP(kThumbRorRR,        0x41c0,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | SETS_CCODES,
-                 "rors", "!0C, !1C", 2),
+                 "rors", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbSbc,           0x4180,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE01 | USES_CCODES | SETS_CCODES,
-                 "sbcs", "!0C, !1C", 2),
+                 "sbcs", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumbStmia,         0xc000,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0 | REG_USE0 | REG_USE_LIST1 | IS_STORE,
-                 "stmia", "!0C!!, <!1R>", 2),
+                 "stmia", "!0C!!, <!1R>", 2, kFixupNone),
     ENCODING_MAP(kThumbStrRRI5,      0x6000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "str", "!0C, [!1C, #!2E]", 2),
+                 "str", "!0C, [!1C, #!2E]", 2, kFixupNone),
     ENCODING_MAP(kThumbStrRRR,       0x5000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE012 | IS_STORE,
-                 "str", "!0C, [!1C, !2C]", 2),
+                 "str", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbStrSpRel,    0x9000,
-                 kFmtBitBlt, 10, 8, kFmtUnused, -1, -1, kFmtBitBlt, 7, 0,
+                 kFmtBitBlt, 10, 8, kFmtSkip, -1, -1, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE0 | REG_USE_SP
-                 | IS_STORE, "str", "!0C, [sp, #!2E]", 2),
+                 | IS_STORE, "str", "!0C, [sp, #!2E]", 2, kFixupNone),
     ENCODING_MAP(kThumbStrbRRI5,     0x7000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "strb", "!0C, [!1C, #!2d]", 2),
+                 "strb", "!0C, [!1C, #!2d]", 2, kFixupNone),
     ENCODING_MAP(kThumbStrbRRR,      0x5400,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE012 | IS_STORE,
-                 "strb", "!0C, [!1C, !2C]", 2),
+                 "strb", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbStrhRRI5,     0x8000,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 10, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "strh", "!0C, [!1C, #!2F]", 2),
+                 "strh", "!0C, [!1C, #!2F]", 2, kFixupNone),
     ENCODING_MAP(kThumbStrhRRR,      0x5200,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE012 | IS_STORE,
-                 "strh", "!0C, [!1C, !2C]", 2),
+                 "strh", "!0C, [!1C, !2C]", 2, kFixupNone),
     ENCODING_MAP(kThumbSubRRI3,      0x1e00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "subs", "!0C, !1C, #!2d", 2),
+                 "subs", "!0C, !1C, #!2d", 2, kFixupNone),
     ENCODING_MAP(kThumbSubRI8,       0x3800,
                  kFmtBitBlt, 10, 8, kFmtBitBlt, 7, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE0 | SETS_CCODES,
-                 "subs", "!0C, #!1d", 2),
+                 "subs", "!0C, #!1d", 2, kFixupNone),
     ENCODING_MAP(kThumbSubRRR,       0x1a00,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtBitBlt, 8, 6,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "subs", "!0C, !1C, !2C", 2),
+                 "subs", "!0C, !1C, !2C", 2, kFixupNone),
     ENCODING_MAP(kThumbSubSpI7,      0xb080,
                  kFmtBitBlt, 6, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP,
-                 "sub", "sp, #!0d*4", 2),
+                 "sub", "sp, #!0d*4", 2, kFixupNone),
     ENCODING_MAP(kThumbSwi,           0xdf00,
                  kFmtBitBlt, 7, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP | IS_BRANCH,
-                 "swi", "!0d", 2),
+                 "swi", "!0d", 2, kFixupNone),
     ENCODING_MAP(kThumbTst,           0x4200,
                  kFmtBitBlt, 2, 0, kFmtBitBlt, 5, 3, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP | REG_USE01 | SETS_CCODES,
-                 "tst", "!0C, !1C", 2),
+                 "tst", "!0C, !1C", 2, kFixupNone),
     ENCODING_MAP(kThumb2Vldrs,       0xed900a00,
                  kFmtSfp, 22, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD |
-                 REG_DEF_LR | NEEDS_FIXUP, "vldr", "!0s, [!1C, #!2E]", 4),
+                 REG_DEF_LR | NEEDS_FIXUP, "vldr", "!0s, [!1C, #!2E]", 4, kFixupVLoad),
     ENCODING_MAP(kThumb2Vldrd,       0xed900b00,
                  kFmtDfp, 22, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD |
-                 REG_DEF_LR | NEEDS_FIXUP, "vldr", "!0S, [!1C, #!2E]", 4),
+                 REG_DEF_LR | NEEDS_FIXUP, "vldr", "!0S, [!1C, #!2E]", 4, kFixupVLoad),
     ENCODING_MAP(kThumb2Vmuls,        0xee200a00,
                  kFmtSfp, 22, 12, kFmtSfp, 7, 16, kFmtSfp, 5, 0,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vmuls", "!0s, !1s, !2s", 4),
+                 "vmuls", "!0s, !1s, !2s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vmuld,        0xee200b00,
                  kFmtDfp, 22, 12, kFmtDfp, 7, 16, kFmtDfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vmuld", "!0S, !1S, !2S", 4),
+                 "vmuld", "!0S, !1S, !2S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vstrs,       0xed800a00,
                  kFmtSfp, 22, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "vstr", "!0s, [!1C, #!2E]", 4),
+                 "vstr", "!0s, [!1C, #!2E]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vstrd,       0xed800b00,
                  kFmtDfp, 22, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "vstr", "!0S, [!1C, #!2E]", 4),
+                 "vstr", "!0S, [!1C, #!2E]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vsubs,        0xee300a40,
                  kFmtSfp, 22, 12, kFmtSfp, 7, 16, kFmtSfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vsub", "!0s, !1s, !2s", 4),
+                 "vsub", "!0s, !1s, !2s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vsubd,        0xee300b40,
                  kFmtDfp, 22, 12, kFmtDfp, 7, 16, kFmtDfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vsub", "!0S, !1S, !2S", 4),
+                 "vsub", "!0S, !1S, !2S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vadds,        0xee300a00,
                  kFmtSfp, 22, 12, kFmtSfp, 7, 16, kFmtSfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vadd", "!0s, !1s, !2s", 4),
+                 "vadd", "!0s, !1s, !2s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vaddd,        0xee300b00,
                  kFmtDfp, 22, 12, kFmtDfp, 7, 16, kFmtDfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vadd", "!0S, !1S, !2S", 4),
+                 "vadd", "!0S, !1S, !2S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vdivs,        0xee800a00,
                  kFmtSfp, 22, 12, kFmtSfp, 7, 16, kFmtSfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vdivs", "!0s, !1s, !2s", 4),
+                 "vdivs", "!0s, !1s, !2s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vdivd,        0xee800b00,
                  kFmtDfp, 22, 12, kFmtDfp, 7, 16, kFmtDfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "vdivd", "!0S, !1S, !2S", 4),
+                 "vdivd", "!0S, !1S, !2S", 4, kFixupNone),
     ENCODING_MAP(kThumb2VcvtIF,       0xeeb80ac0,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vcvt.f32", "!0s, !1s", 4),
+                 "vcvt.f32", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2VcvtID,       0xeeb80bc0,
                  kFmtDfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vcvt.f64", "!0S, !1s", 4),
+                 "vcvt.f64", "!0S, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2VcvtFI,       0xeebd0ac0,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vcvt.s32.f32 ", "!0s, !1s", 4),
+                 "vcvt.s32.f32 ", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2VcvtDI,       0xeebd0bc0,
                  kFmtSfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vcvt.s32.f64 ", "!0s, !1S", 4),
+                 "vcvt.s32.f64 ", "!0s, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2VcvtFd,       0xeeb70ac0,
                  kFmtDfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vcvt.f64.f32 ", "!0S, !1s", 4),
+                 "vcvt.f64.f32 ", "!0S, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2VcvtDF,       0xeeb70bc0,
                  kFmtSfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vcvt.f32.f64 ", "!0s, !1S", 4),
+                 "vcvt.f32.f64 ", "!0s, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vsqrts,       0xeeb10ac0,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vsqrt.f32 ", "!0s, !1s", 4),
+                 "vsqrt.f32 ", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vsqrtd,       0xeeb10bc0,
                  kFmtDfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vsqrt.f64 ", "!0S, !1S", 4),
+                 "vsqrt.f64 ", "!0S, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2MovImmShift, 0xf04f0000, /* no setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtModImm, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0,
-                 "mov", "!0C, #!1m", 4),
+                 "mov", "!0C, #!1m", 4, kFixupNone),
     ENCODING_MAP(kThumb2MovImm16,       0xf2400000,
                  kFmtBitBlt, 11, 8, kFmtImm16, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0,
-                 "mov", "!0C, #!1M", 4),
+                 "mov", "!0C, #!1M", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrRRI12,       0xf8c00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "str", "!0C, [!1C, #!2d]", 4),
+                 "str", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrRRI12,       0xf8d00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldr", "!0C, [!1C, #!2d]", 4),
+                 "ldr", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrRRI8Predec,       0xf8400c00,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 8, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "str", "!0C, [!1C, #-!2d]", 4),
+                 "str", "!0C, [!1C, #-!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrRRI8Predec,       0xf8500c00,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 8, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldr", "!0C, [!1C, #-!2d]", 4),
+                 "ldr", "!0C, [!1C, #-!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Cbnz,       0xb900, /* Note: does not affect flags */
                  kFmtBitBlt, 2, 0, kFmtImm6, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE0 | IS_BRANCH |
-                 NEEDS_FIXUP, "cbnz", "!0C,!1t", 2),
+                 NEEDS_FIXUP, "cbnz", "!0C,!1t", 2, kFixupCBxZ),
     ENCODING_MAP(kThumb2Cbz,       0xb100, /* Note: does not affect flags */
                  kFmtBitBlt, 2, 0, kFmtImm6, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE0 | IS_BRANCH |
-                 NEEDS_FIXUP, "cbz", "!0C,!1t", 2),
+                 NEEDS_FIXUP, "cbz", "!0C,!1t", 2, kFixupCBxZ),
     ENCODING_MAP(kThumb2AddRRI12,       0xf2000000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtImm12, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1,/* Note: doesn't affect flags */
-                 "add", "!0C,!1C,#!2d", 4),
+                 "add", "!0C,!1C,#!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2MovRR,       0xea4f0000, /* no setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 3, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "mov", "!0C, !1C", 4),
+                 "mov", "!0C, !1C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vmovs,       0xeeb00a40,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vmov.f32 ", " !0s, !1s", 4),
+                 "vmov.f32 ", " !0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vmovd,       0xeeb00b40,
                  kFmtDfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vmov.f64 ", " !0S, !1S", 4),
+                 "vmov.f64 ", " !0S, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Ldmia,         0xe8900000,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 15, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE0 | REG_DEF_LIST1 | IS_LOAD,
-                 "ldmia", "!0C!!, <!1R>", 4),
+                 "ldmia", "!0C!!, <!1R>", 4, kFixupNone),
     ENCODING_MAP(kThumb2Stmia,         0xe8800000,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 15, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE0 | REG_USE_LIST1 | IS_STORE,
-                 "stmia", "!0C!!, <!1R>", 4),
+                 "stmia", "!0C!!, <!1R>", 4, kFixupNone),
     ENCODING_MAP(kThumb2AddRRR,  0xeb100000, /* setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1,
                  IS_QUAD_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "adds", "!0C, !1C, !2C!3H", 4),
+                 "adds", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2SubRRR,       0xebb00000, /* setflags enconding */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1,
                  IS_QUAD_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "subs", "!0C, !1C, !2C!3H", 4),
+                 "subs", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2SbcRRR,       0xeb700000, /* setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1,
                  IS_QUAD_OP | REG_DEF0_USE12 | USES_CCODES | SETS_CCODES,
-                 "sbcs", "!0C, !1C, !2C!3H", 4),
+                 "sbcs", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2CmpRR,       0xebb00f00,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0, kFmtShift, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_USE01 | SETS_CCODES,
-                 "cmp", "!0C, !1C", 4),
+                 "cmp", "!0C, !1C", 4, kFixupNone),
     ENCODING_MAP(kThumb2SubRRI12,       0xf2a00000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtImm12, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1,/* Note: doesn't affect flags */
-                 "sub", "!0C,!1C,#!2d", 4),
+                 "sub", "!0C,!1C,#!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2MvnImm12,  0xf06f0000, /* no setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtImm12, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0,
-                 "mvn", "!0C, #!1n", 4),
+                 "mvn", "!0C, #!1n", 4, kFixupNone),
     ENCODING_MAP(kThumb2Sel,       0xfaa0f080,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE12 | USES_CCODES,
-                 "sel", "!0C, !1C, !2C", 4),
+                 "sel", "!0C, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Ubfx,       0xf3c00000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtLsb, -1, -1,
                  kFmtBWidth, 4, 0, IS_QUAD_OP | REG_DEF0_USE1,
-                 "ubfx", "!0C, !1C, #!2d, #!3d", 4),
+                 "ubfx", "!0C, !1C, #!2d, #!3d", 4, kFixupNone),
     ENCODING_MAP(kThumb2Sbfx,       0xf3400000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtLsb, -1, -1,
                  kFmtBWidth, 4, 0, IS_QUAD_OP | REG_DEF0_USE1,
-                 "sbfx", "!0C, !1C, #!2d, #!3d", 4),
+                 "sbfx", "!0C, !1C, #!2d, #!3d", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrRRR,    0xf8500000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldr", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "ldr", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrhRRR,    0xf8300000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrh", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "ldrh", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrshRRR,    0xf9300000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrsh", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "ldrsh", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrbRRR,    0xf8100000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrb", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "ldrb", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrsbRRR,    0xf9100000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_DEF0_USE12 | IS_LOAD,
-                 "ldrsb", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "ldrsb", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrRRR,    0xf8400000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_USE012 | IS_STORE,
-                 "str", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "str", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrhRRR,    0xf8200000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_USE012 | IS_STORE,
-                 "strh", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "strh", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrbRRR,    0xf8000000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 5, 4, IS_QUAD_OP | REG_USE012 | IS_STORE,
-                 "strb", "!0C, [!1C, !2C, LSL #!3d]", 4),
+                 "strb", "!0C, [!1C, !2C, LSL #!3d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrhRRI12,       0xf8b00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrh", "!0C, [!1C, #!2d]", 4),
+                 "ldrh", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrshRRI12,       0xf9b00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrsh", "!0C, [!1C, #!2d]", 4),
+                 "ldrsh", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrbRRI12,       0xf8900000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrb", "!0C, [!1C, #!2d]", 4),
+                 "ldrb", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrsbRRI12,       0xf9900000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrsb", "!0C, [!1C, #!2d]", 4),
+                 "ldrsb", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrhRRI12,       0xf8a00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "strh", "!0C, [!1C, #!2d]", 4),
+                 "strh", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrbRRI12,       0xf8800000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 11, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_USE01 | IS_STORE,
-                 "strb", "!0C, [!1C, #!2d]", 4),
+                 "strb", "!0C, [!1C, #!2d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Pop,           0xe8bd0000,
                  kFmtBitBlt, 15, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_DEF_LIST0
-                 | IS_LOAD | NEEDS_FIXUP, "pop", "<!0R>", 4),
+                 | IS_LOAD | NEEDS_FIXUP, "pop", "<!0R>", 4, kFixupPushPop),
     ENCODING_MAP(kThumb2Push,          0xe92d0000,
                  kFmtBitBlt, 15, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_USE_LIST0
-                 | IS_STORE | NEEDS_FIXUP, "push", "<!0R>", 4),
+                 | IS_STORE | NEEDS_FIXUP, "push", "<!0R>", 4, kFixupPushPop),
     ENCODING_MAP(kThumb2CmpRI12, 0xf1b00f00,
                  kFmtBitBlt, 19, 16, kFmtModImm, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_USE0 | SETS_CCODES,
-                 "cmp", "!0C, #!1m", 4),
+                 "cmp", "!0C, #!1m", 4, kFixupNone),
     ENCODING_MAP(kThumb2AdcRRR,  0xeb500000, /* setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1,
                  IS_QUAD_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "adcs", "!0C, !1C, !2C!3H", 4),
+                 "adcs", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2AndRRR,  0xea000000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1, IS_QUAD_OP | REG_DEF0_USE12,
-                 "and", "!0C, !1C, !2C!3H", 4),
+                 "and", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2BicRRR,  0xea200000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1, IS_QUAD_OP | REG_DEF0_USE12,
-                 "bic", "!0C, !1C, !2C!3H", 4),
+                 "bic", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2CmnRR,  0xeb000000,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0, kFmtShift, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "cmn", "!0C, !1C, shift !2d", 4),
+                 "cmn", "!0C, !1C, shift !2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2EorRRR,  0xea800000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1, IS_QUAD_OP | REG_DEF0_USE12,
-                 "eor", "!0C, !1C, !2C!3H", 4),
+                 "eor", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2MulRRR,  0xfb00f000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "mul", "!0C, !1C, !2C", 4),
+                 "mul", "!0C, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2MnvRR,  0xea6f0000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 3, 0, kFmtShift, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "mvn", "!0C, !1C, shift !2d", 4),
+                 "mvn", "!0C, !1C, shift !2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2RsubRRI8,       0xf1d00000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "rsb", "!0C,!1C,#!2m", 4),
+                 "rsb", "!0C,!1C,#!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2NegRR,       0xf1d00000, /* instance of rsub */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "neg", "!0C,!1C", 4),
+                 "neg", "!0C,!1C", 4, kFixupNone),
     ENCODING_MAP(kThumb2OrrRRR,  0xea400000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1, IS_QUAD_OP | REG_DEF0_USE12,
-                 "orr", "!0C, !1C, !2C!3H", 4),
+                 "orr", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2TstRR,       0xea100f00,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0, kFmtShift, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_USE01 | SETS_CCODES,
-                 "tst", "!0C, !1C, shift !2d", 4),
+                 "tst", "!0C, !1C, shift !2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2LslRRR,  0xfa00f000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "lsl", "!0C, !1C, !2C", 4),
+                 "lsl", "!0C, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2LsrRRR,  0xfa20f000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "lsr", "!0C, !1C, !2C", 4),
+                 "lsr", "!0C, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2AsrRRR,  0xfa40f000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "asr", "!0C, !1C, !2C", 4),
+                 "asr", "!0C, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2RorRRR,  0xfa60f000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "ror", "!0C, !1C, !2C", 4),
+                 "ror", "!0C, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2LslRRI5,  0xea4f0000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 3, 0, kFmtShift5, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "lsl", "!0C, !1C, #!2d", 4),
+                 "lsl", "!0C, !1C, #!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2LsrRRI5,  0xea4f0010,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 3, 0, kFmtShift5, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "lsr", "!0C, !1C, #!2d", 4),
+                 "lsr", "!0C, !1C, #!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2AsrRRI5,  0xea4f0020,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 3, 0, kFmtShift5, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "asr", "!0C, !1C, #!2d", 4),
+                 "asr", "!0C, !1C, #!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2RorRRI5,  0xea4f0030,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 3, 0, kFmtShift5, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "ror", "!0C, !1C, #!2d", 4),
+                 "ror", "!0C, !1C, #!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2BicRRI8,  0xf0200000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "bic", "!0C, !1C, #!2m", 4),
+                 "bic", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2AndRRI8,  0xf0000000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "and", "!0C, !1C, #!2m", 4),
+                 "and", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2OrrRRI8,  0xf0400000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "orr", "!0C, !1C, #!2m", 4),
+                 "orr", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2EorRRI8,  0xf0800000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1,
-                 "eor", "!0C, !1C, #!2m", 4),
+                 "eor", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2AddRRI8,  0xf1100000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "adds", "!0C, !1C, #!2m", 4),
+                 "adds", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2AdcRRI8,  0xf1500000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES | USES_CCODES,
-                 "adcs", "!0C, !1C, #!2m", 4),
+                 "adcs", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2SubRRI8,  0xf1b00000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "subs", "!0C, !1C, #!2m", 4),
+                 "subs", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2SbcRRI8,  0xf1700000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES | USES_CCODES,
-                 "sbcs", "!0C, !1C, #!2m", 4),
+                 "sbcs", "!0C, !1C, #!2m", 4, kFixupNone),
     ENCODING_MAP(kThumb2It,  0xbf00,
                  kFmtBitBlt, 7, 4, kFmtBitBlt, 3, 0, kFmtModImm, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | IS_IT | USES_CCODES,
-                 "it:!1b", "!0c", 2),
+                 "it:!1b", "!0c", 2, kFixupNone),
     ENCODING_MAP(kThumb2Fmstat,  0xeef1fa10,
                  kFmtUnused, -1, -1, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, NO_OPERAND | SETS_CCODES,
-                 "fmstat", "", 4),
+                 "fmstat", "", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vcmpd,        0xeeb40b40,
                  kFmtDfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01,
-                 "vcmp.f64", "!0S, !1S", 4),
+                 "vcmp.f64", "!0S, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vcmps,        0xeeb40a40,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_USE01,
-                 "vcmp.f32", "!0s, !1s", 4),
+                 "vcmp.f32", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrPcRel12,       0xf8df0000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0 | REG_USE_PC | IS_LOAD | NEEDS_FIXUP,
-                 "ldr", "!0C, [r15pc, #!1d]", 4),
+                 "ldr", "!0C, [r15pc, #!1d]", 4, kFixupLoad),
     ENCODING_MAP(kThumb2BCond,        0xf0008000,
                  kFmtBrOffset, -1, -1, kFmtBitBlt, 25, 22, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | IS_BRANCH | USES_CCODES | NEEDS_FIXUP,
-                 "b!1c", "!0t", 4),
+                 "b!1c", "!0t", 4, kFixupCondBranch),
     ENCODING_MAP(kThumb2Vmovd_RR,       0xeeb00b40,
                  kFmtDfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vmov.f64", "!0S, !1S", 4),
+                 "vmov.f64", "!0S, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vmovs_RR,       0xeeb00a40,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vmov.f32", "!0s, !1s", 4),
+                 "vmov.f32", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Fmrs,       0xee100a10,
                  kFmtBitBlt, 15, 12, kFmtSfp, 7, 16, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "fmrs", "!0C, !1s", 4),
+                 "fmrs", "!0C, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Fmsr,       0xee000a10,
                  kFmtSfp, 7, 16, kFmtBitBlt, 15, 12, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "fmsr", "!0s, !1C", 4),
+                 "fmsr", "!0s, !1C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Fmrrd,       0xec500b10,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtDfp, 5, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF01_USE2,
-                 "fmrrd", "!0C, !1C, !2S", 4),
+                 "fmrrd", "!0C, !1C, !2S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Fmdrr,       0xec400b10,
                  kFmtDfp, 5, 0, kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE12,
-                 "fmdrr", "!0S, !1C, !2C", 4),
+                 "fmdrr", "!0S, !1C, !2C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vabsd,       0xeeb00bc0,
                  kFmtDfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vabs.f64", "!0S, !1S", 4),
+                 "vabs.f64", "!0S, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vabss,       0xeeb00ac0,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vabs.f32", "!0s, !1s", 4),
+                 "vabs.f32", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vnegd,       0xeeb10b40,
                  kFmtDfp, 22, 12, kFmtDfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vneg.f64", "!0S, !1S", 4),
+                 "vneg.f64", "!0S, !1S", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vnegs,       0xeeb10a40,
                  kFmtSfp, 22, 12, kFmtSfp, 5, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0_USE1,
-                 "vneg.f32", "!0s, !1s", 4),
+                 "vneg.f32", "!0s, !1s", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vmovs_IMM8,       0xeeb00a00,
                  kFmtSfp, 22, 12, kFmtFPImm, 16, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0,
-                 "vmov.f32", "!0s, #0x!1h", 4),
+                 "vmov.f32", "!0s, #0x!1h", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vmovd_IMM8,       0xeeb00b00,
                  kFmtDfp, 22, 12, kFmtFPImm, 16, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0,
-                 "vmov.f64", "!0S, #0x!1h", 4),
+                 "vmov.f64", "!0S, #0x!1h", 4, kFixupNone),
     ENCODING_MAP(kThumb2Mla,  0xfb000000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtBitBlt, 15, 12,
                  IS_QUAD_OP | REG_DEF0 | REG_USE1 | REG_USE2 | REG_USE3,
-                 "mla", "!0C, !1C, !2C, !3C", 4),
+                 "mla", "!0C, !1C, !2C, !3C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Umull,  0xfba00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16,
                  kFmtBitBlt, 3, 0,
                  IS_QUAD_OP | REG_DEF0 | REG_DEF1 | REG_USE2 | REG_USE3,
-                 "umull", "!0C, !1C, !2C, !3C", 4),
+                 "umull", "!0C, !1C, !2C, !3C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Ldrex,       0xe8500f00,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0_USE1 | IS_LOAD,
-                 "ldrex", "!0C, [!1C, #!2E]", 4),
+                 "ldrex", "!0C, [!1C, #!2E]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Strex,       0xe8400000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 15, 12, kFmtBitBlt, 19, 16,
                  kFmtBitBlt, 7, 0, IS_QUAD_OP | REG_DEF0_USE12 | IS_STORE,
-                 "strex", "!0C,!1C, [!2C, #!2E]", 4),
+                 "strex", "!0C,!1C, [!2C, #!2E]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Clrex,       0xf3bf8f2f,
                  kFmtUnused, -1, -1, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, NO_OPERAND,
-                 "clrex", "", 4),
+                 "clrex", "", 4, kFixupNone),
     ENCODING_MAP(kThumb2Bfi,         0xf3600000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtShift5, -1, -1,
                  kFmtBitBlt, 4, 0, IS_QUAD_OP | REG_DEF0_USE1,
-                 "bfi", "!0C,!1C,#!2d,#!3d", 4),
+                 "bfi", "!0C,!1C,#!2d,#!3d", 4, kFixupNone),
     ENCODING_MAP(kThumb2Bfc,         0xf36f0000,
                  kFmtBitBlt, 11, 8, kFmtShift5, -1, -1, kFmtBitBlt, 4, 0,
                  kFmtUnused, -1, -1, IS_TERTIARY_OP | REG_DEF0,
-                 "bfc", "!0C,#!1d,#!2d", 4),
+                 "bfc", "!0C,#!1d,#!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2Dmb,         0xf3bf8f50,
                  kFmtBitBlt, 3, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_UNARY_OP,
-                 "dmb", "#!0B", 4),
+                 "dmb", "#!0B", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrPcReln12,       0xf85f0000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0 | REG_USE_PC | IS_LOAD,
-                 "ldr", "!0C, [r15pc, -#!1d]", 4),
+                 "ldr", "!0C, [r15pc, -#!1d]", 4, kFixupNone),
     ENCODING_MAP(kThumb2Stm,          0xe9000000,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 12, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_USE0 | REG_USE_LIST1 | IS_STORE,
-                 "stm", "!0C, <!1R>", 4),
+                 "stm", "!0C, <!1R>", 4, kFixupNone),
     ENCODING_MAP(kThumbUndefined,       0xde00,
                  kFmtUnused, -1, -1, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, NO_OPERAND,
-                 "undefined", "", 2),
+                 "undefined", "", 2, kFixupNone),
     // NOTE: vpop, vpush hard-encoded for s16+ reg list
     ENCODING_MAP(kThumb2VPopCS,       0xecbd8a00,
                  kFmtBitBlt, 7, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_DEF_FPCS_LIST0
-                 | IS_LOAD, "vpop", "<!0P>", 4),
+                 | IS_LOAD, "vpop", "<!0P>", 4, kFixupNone),
     ENCODING_MAP(kThumb2VPushCS,      0xed2d8a00,
                  kFmtBitBlt, 7, 0, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_USE_FPCS_LIST0
-                 | IS_STORE, "vpush", "<!0P>", 4),
+                 | IS_STORE, "vpush", "<!0P>", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vldms,        0xec900a00,
                  kFmtBitBlt, 19, 16, kFmtSfp, 22, 12, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_USE0 | REG_DEF_FPCS_LIST2
-                 | IS_LOAD, "vldms", "!0C, <!2Q>", 4),
+                 | IS_LOAD, "vldms", "!0C, <!2Q>", 4, kFixupNone),
     ENCODING_MAP(kThumb2Vstms,        0xec800a00,
                  kFmtBitBlt, 19, 16, kFmtSfp, 22, 12, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_USE0 | REG_USE_FPCS_LIST2
-                 | IS_STORE, "vstms", "!0C, <!2Q>", 4),
+                 | IS_STORE, "vstms", "!0C, <!2Q>", 4, kFixupNone),
     ENCODING_MAP(kThumb2BUncond,      0xf0009000,
                  kFmtOff24, -1, -1, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, NO_OPERAND | IS_BRANCH,
-                 "b", "!0t", 4),
+                 "b", "!0t", 4, kFixupT2Branch),
     ENCODING_MAP(kThumb2MovImm16H,       0xf2c00000,
                  kFmtBitBlt, 11, 8, kFmtImm16, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0 | REG_USE0,
-                 "movt", "!0C, #!1M", 4),
+                 "movt", "!0C, #!1M", 4, kFixupNone),
     ENCODING_MAP(kThumb2AddPCR,      0x4487,
                  kFmtBitBlt, 6, 3, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
-                 IS_UNARY_OP | REG_USE0 | IS_BRANCH,
-                 "add", "rPC, !0C", 2),
+                 IS_UNARY_OP | REG_USE0 | IS_BRANCH | NEEDS_FIXUP,
+                 "add", "rPC, !0C", 2, kFixupLabel),
     ENCODING_MAP(kThumb2Adr,         0xf20f0000,
                  kFmtBitBlt, 11, 8, kFmtImm12, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  /* Note: doesn't affect flags */
                  IS_TERTIARY_OP | REG_DEF0 | NEEDS_FIXUP,
-                 "adr", "!0C,#!1d", 4),
+                 "adr", "!0C,#!1d", 4, kFixupAdr),
     ENCODING_MAP(kThumb2MovImm16LST,     0xf2400000,
                  kFmtBitBlt, 11, 8, kFmtImm16, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0 | NEEDS_FIXUP,
-                 "mov", "!0C, #!1M", 4),
+                 "mov", "!0C, #!1M", 4, kFixupMovImmLST),
     ENCODING_MAP(kThumb2MovImm16HST,     0xf2c00000,
                  kFmtBitBlt, 11, 8, kFmtImm16, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1, IS_BINARY_OP | REG_DEF0 | REG_USE0 | NEEDS_FIXUP,
-                 "movt", "!0C, #!1M", 4),
+                 "movt", "!0C, #!1M", 4, kFixupMovImmHST),
     ENCODING_MAP(kThumb2LdmiaWB,         0xe8b00000,
                  kFmtBitBlt, 19, 16, kFmtBitBlt, 15, 0, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_BINARY_OP | REG_DEF0_USE0 | REG_DEF_LIST1 | IS_LOAD,
-                 "ldmia", "!0C!!, <!1R>", 4),
+                 "ldmia", "!0C!!, <!1R>", 4, kFixupNone),
     ENCODING_MAP(kThumb2SubsRRI12,       0xf1b00000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtImm12, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0_USE1 | SETS_CCODES,
-                 "subs", "!0C,!1C,#!2d", 4),
+                 "subs", "!0C,!1C,#!2d", 4, kFixupNone),
     ENCODING_MAP(kThumb2OrrRRRs,  0xea500000,
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1, IS_QUAD_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "orrs", "!0C, !1C, !2C!3H", 4),
+                 "orrs", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2Push1,    0xf84d0d04,
                  kFmtBitBlt, 15, 12, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_USE0
-                 | IS_STORE, "push1", "!0C", 4),
+                 | IS_STORE, "push1", "!0C", 4, kFixupNone),
     ENCODING_MAP(kThumb2Pop1,    0xf85d0b04,
                  kFmtBitBlt, 15, 12, kFmtUnused, -1, -1, kFmtUnused, -1, -1,
                  kFmtUnused, -1, -1,
                  IS_UNARY_OP | REG_DEF_SP | REG_USE_SP | REG_DEF0
-                 | IS_LOAD, "pop1", "!0C", 4),
+                 | IS_LOAD, "pop1", "!0C", 4, kFixupNone),
     ENCODING_MAP(kThumb2RsubRRR,  0xebd00000, /* setflags encoding */
                  kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16, kFmtBitBlt, 3, 0,
                  kFmtShift, -1, -1,
                  IS_QUAD_OP | REG_DEF0_USE12 | SETS_CCODES,
-                 "rsbs", "!0C, !1C, !2C!3H", 4),
+                 "rsbs", "!0C, !1C, !2C!3H", 4, kFixupNone),
     ENCODING_MAP(kThumb2Smull,  0xfb800000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16,
                  kFmtBitBlt, 3, 0,
                  IS_QUAD_OP | REG_DEF0 | REG_DEF1 | REG_USE2 | REG_USE3,
-                 "smull", "!0C, !1C, !2C, !3C", 4),
+                 "smull", "!0C, !1C, !2C, !3C", 4, kFixupNone),
     ENCODING_MAP(kThumb2LdrdPcRel8,  0xe9df0000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 8, kFmtBitBlt, 7, 0,
                  kFmtUnused, -1, -1,
                  IS_TERTIARY_OP | REG_DEF0 | REG_DEF1 | REG_USE_PC | IS_LOAD | NEEDS_FIXUP,
-                 "ldrd", "!0C, !1C, [pc, #!2E]", 4),
+                 "ldrd", "!0C, !1C, [pc, #!2E]", 4, kFixupLoad),
     ENCODING_MAP(kThumb2LdrdI8, 0xe9d00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16,
                  kFmtBitBlt, 7, 0,
                  IS_QUAD_OP | REG_DEF0 | REG_DEF1 | REG_USE2 | IS_LOAD,
-                 "ldrd", "!0C, !1C, [!2C, #!3E]", 4),
+                 "ldrd", "!0C, !1C, [!2C, #!3E]", 4, kFixupNone),
     ENCODING_MAP(kThumb2StrdI8, 0xe9c00000,
                  kFmtBitBlt, 15, 12, kFmtBitBlt, 11, 8, kFmtBitBlt, 19, 16,
                  kFmtBitBlt, 7, 0,
                  IS_QUAD_OP | REG_USE0 | REG_USE1 | REG_USE2 | IS_STORE,
-                 "strd", "!0C, !1C, [!2C, #!3E]", 4),
+                 "strd", "!0C, !1C, [!2C, #!3E]", 4, kFixupNone),
 };
 
+// new_lir replaces orig_lir in the pcrel_fixup list (singly-linked via u.a.pcrel_next, headed by first_fixup_).
+void ArmMir2Lir::ReplaceFixup(LIR* prev_lir, LIR* orig_lir, LIR* new_lir) {
+  new_lir->u.a.pcrel_next = orig_lir->u.a.pcrel_next;  // new_lir inherits orig_lir's successor.
+  if (UNLIKELY(prev_lir == NULL)) {
+    first_fixup_ = new_lir;  // orig_lir was the list head.
+  } else {
+    prev_lir->u.a.pcrel_next = new_lir;
+  }
+  orig_lir->flags.fixup = kFixupNone;  // orig_lir no longer requires fixup processing.
+}
+
+// new_lir is inserted before orig_lir in the pcrel_fixup list; orig_lir keeps its fixup kind.
+void ArmMir2Lir::InsertFixupBefore(LIR* prev_lir, LIR* orig_lir, LIR* new_lir) {
+  new_lir->u.a.pcrel_next = orig_lir;
+  if (UNLIKELY(prev_lir == NULL)) {
+    first_fixup_ = new_lir;  // Inserting at the head of the chain.
+  } else {
+    DCHECK(prev_lir->u.a.pcrel_next == orig_lir);  // prev_lir must immediately precede orig_lir.
+    prev_lir->u.a.pcrel_next = new_lir;
+  }
+}
+
 /*
  * The fake NOP of moving r0 to r0 actually will incur data stalls if r0 is
  * not ready. Since r5FP is not updated often, it is less likely to
@@ -997,290 +1019,16 @@
  */
 #define PADDING_MOV_R5_R5               0x1C2D
 
-/*
- * Assemble the LIR into binary instruction format.  Note that we may
- * discover that pc-relative displacements may not fit the selected
- * instruction.
- */
-AssemblerStatus ArmMir2Lir::AssembleInstructions(uintptr_t start_addr) {
-  LIR* lir;
-  AssemblerStatus res = kSuccess;  // Assume success
-
-  for (lir = first_lir_insn_; lir != NULL; lir = NEXT_LIR(lir)) {
-    if (lir->opcode < 0) {
-      /* 1 means padding is needed */
-      if ((lir->opcode == kPseudoPseudoAlign4) && (lir->operands[0] == 1)) {
-        code_buffer_.push_back(PADDING_MOV_R5_R5 & 0xFF);
-        code_buffer_.push_back((PADDING_MOV_R5_R5 >> 8) & 0xFF);
-      }
-      continue;
+void ArmMir2Lir::EncodeLIR(LIR* lir) {
+  int opcode = lir->opcode;
+  if (opcode < 0) {
+    if (UNLIKELY(opcode == kPseudoPseudoAlign4)) {
+      // Note: size for this opcode will be either 0 or 2 depending on final alignment.
+      lir->u.a.bytes[0] = (PADDING_MOV_R5_R5 & 0xff);
+      lir->u.a.bytes[1] = ((PADDING_MOV_R5_R5 >> 8) & 0xff);
+      lir->flags.size = (lir->offset & 0x2);
     }
-
-    if (lir->flags.is_nop) {
-      continue;
-    }
-
-    /*
-     * For PC-relative displacements we won't know if the
-     * selected instruction will work until late (i.e. - now).
-     * If something doesn't fit, we must replace the short-form
-     * operation with a longer-form one.  Note, though, that this
-     * can change code we've already processed, so we'll need to
-     * re-calculate offsets and restart.  To limit the number of
-     * restarts, the entire list will be scanned and patched.
-     * Of course, the patching itself may cause new overflows so this
-     * is an iterative process.
-     */
-    if (lir->flags.pcRelFixup) {
-      if (lir->opcode == kThumbLdrPcRel ||
-          lir->opcode == kThumb2LdrPcRel12 ||
-          lir->opcode == kThumbAddPcRel ||
-          lir->opcode == kThumb2LdrdPcRel8 ||
-          ((lir->opcode == kThumb2Vldrd) && (lir->operands[1] == r15pc)) ||
-          ((lir->opcode == kThumb2Vldrs) && (lir->operands[1] == r15pc))) {
-        /*
-         * PC-relative loads are mostly used to load immediates
-         * that are too large to materialize directly in one shot.
-         * However, if the load displacement exceeds the limit,
-         * we revert to a multiple-instruction materialization sequence.
-         */
-        LIR *lir_target = lir->target;
-        uintptr_t pc = (lir->offset + 4) & ~3;
-        uintptr_t target = lir_target->offset;
-        int delta = target - pc;
-        if (delta & 0x3) {
-          LOG(FATAL) << "PC-rel offset not multiple of 4: " << delta;
-        }
-        // First, a sanity check for cases we shouldn't see now
-        if (((lir->opcode == kThumbAddPcRel) && (delta > 1020)) ||
-            ((lir->opcode == kThumbLdrPcRel) && (delta > 1020))) {
-          // Shouldn't happen in current codegen.
-          LOG(FATAL) << "Unexpected pc-rel offset " << delta;
-        }
-        // Now, check for the difficult cases
-        if (((lir->opcode == kThumb2LdrPcRel12) && (delta > 4091)) ||
-            ((lir->opcode == kThumb2LdrdPcRel8) && (delta > 1020)) ||
-            ((lir->opcode == kThumb2Vldrs) && (delta > 1020)) ||
-            ((lir->opcode == kThumb2Vldrd) && (delta > 1020))) {
-          /*
-           * Note: because rARM_LR may be used to fix up out-of-range
-           * vldrs/vldrd we include REG_DEF_LR in the resource
-           * masks for these instructions.
-           */
-          int base_reg = ((lir->opcode == kThumb2LdrdPcRel8) || (lir->opcode == kThumb2LdrPcRel12))
-              ?  lir->operands[0] : rARM_LR;
-
-          // Add new Adr to generate the address.
-          LIR* new_adr = RawLIR(lir->dalvik_offset, kThumb2Adr,
-                     base_reg, 0, 0, 0, 0, lir->target);
-          InsertLIRBefore(lir, new_adr);
-
-          // Convert to normal load.
-          if (lir->opcode == kThumb2LdrPcRel12) {
-            lir->opcode = kThumb2LdrRRI12;
-          } else if (lir->opcode == kThumb2LdrdPcRel8) {
-            lir->opcode = kThumb2LdrdI8;
-          }
-          // Change the load to be relative to the new Adr base.
-          if (lir->opcode == kThumb2LdrdI8) {
-            lir->operands[3] = 0;
-            lir->operands[2] = base_reg;
-          } else {
-            lir->operands[2] = 0;
-            lir->operands[1] = base_reg;
-          }
-          SetupResourceMasks(lir);
-          res = kRetryAll;
-        } else {
-          if ((lir->opcode == kThumb2Vldrs) ||
-              (lir->opcode == kThumb2Vldrd) ||
-              (lir->opcode == kThumb2LdrdPcRel8)) {
-            lir->operands[2] = delta >> 2;
-          } else {
-            lir->operands[1] = (lir->opcode == kThumb2LdrPcRel12) ?  delta :
-                delta >> 2;
-          }
-        }
-      } else if (lir->opcode == kThumb2Cbnz || lir->opcode == kThumb2Cbz) {
-        LIR *target_lir = lir->target;
-        uintptr_t pc = lir->offset + 4;
-        uintptr_t target = target_lir->offset;
-        int delta = target - pc;
-        if (delta > 126 || delta < 0) {
-          /*
-           * Convert to cmp rx,#0 / b[eq/ne] tgt pair
-           * Make new branch instruction and insert after
-           */
-          LIR* new_inst =
-            RawLIR(lir->dalvik_offset, kThumbBCond, 0,
-                   (lir->opcode == kThumb2Cbz) ? kArmCondEq : kArmCondNe,
-                   0, 0, 0, lir->target);
-          InsertLIRAfter(lir, new_inst);
-          /* Convert the cb[n]z to a cmp rx, #0 ] */
-          lir->opcode = kThumbCmpRI8;
-          /* operand[0] is src1 in both cb[n]z & CmpRI8 */
-          lir->operands[1] = 0;
-          lir->target = 0;
-          SetupResourceMasks(lir);
-          /*
-           * Because we just added this new instruction after the current one,
-           * advance lir so that this new instruction won't be checked for displacement
-           * overflow until the next pass (when its base offset will be properly established).
-           */
-          lir = new_inst;
-          res = kRetryAll;
-        } else {
-          lir->operands[1] = delta >> 1;
-        }
-      } else if (lir->opcode == kThumb2Push || lir->opcode == kThumb2Pop) {
-        if (__builtin_popcount(lir->operands[0]) == 1) {
-          /*
-           * The standard push/pop multiple instruction
-           * requires at least two registers in the list.
-           * If we've got just one, switch to the single-reg
-           * encoding.
-           */
-          lir->opcode = (lir->opcode == kThumb2Push) ? kThumb2Push1 :
-              kThumb2Pop1;
-          int reg = 0;
-          while (lir->operands[0]) {
-            if (lir->operands[0] & 0x1) {
-              break;
-            } else {
-              reg++;
-              lir->operands[0] >>= 1;
-            }
-          }
-          lir->operands[0] = reg;
-          SetupResourceMasks(lir);
-          res = kRetryAll;
-        }
-      } else if (lir->opcode == kThumbBCond || lir->opcode == kThumb2BCond) {
-        LIR *target_lir = lir->target;
-        int delta = 0;
-        DCHECK(target_lir);
-        uintptr_t pc = lir->offset + 4;
-        uintptr_t target = target_lir->offset;
-        delta = target - pc;
-        if ((lir->opcode == kThumbBCond) && (delta > 254 || delta < -256)) {
-          lir->opcode = kThumb2BCond;
-          SetupResourceMasks(lir);
-          res = kRetryAll;
-        }
-        lir->operands[0] = delta >> 1;
-      } else if (lir->opcode == kThumb2BUncond) {
-        LIR *target_lir = lir->target;
-        uintptr_t pc = lir->offset + 4;
-        uintptr_t target = target_lir->offset;
-        int delta = target - pc;
-        lir->operands[0] = delta >> 1;
-        if (!(cu_->disable_opt & (1 << kSafeOptimizations)) &&
-          lir->operands[0] == 0) {  // Useless branch
-          NopLIR(lir);
-          res = kRetryAll;
-        }
-      } else if (lir->opcode == kThumbBUncond) {
-        LIR *target_lir = lir->target;
-        uintptr_t pc = lir->offset + 4;
-        uintptr_t target = target_lir->offset;
-        int delta = target - pc;
-        if (delta > 2046 || delta < -2048) {
-          // Convert to Thumb2BCond w/ kArmCondAl
-          lir->opcode = kThumb2BUncond;
-          lir->operands[0] = 0;
-          SetupResourceMasks(lir);
-          res = kRetryAll;
-        } else {
-          lir->operands[0] = delta >> 1;
-          if (!(cu_->disable_opt & (1 << kSafeOptimizations)) &&
-            lir->operands[0] == -1) {  // Useless branch
-            NopLIR(lir);
-            res = kRetryAll;
-          }
-        }
-      } else if (lir->opcode == kThumbBlx1) {
-        DCHECK(NEXT_LIR(lir)->opcode == kThumbBlx2);
-        /* cur_pc is Thumb */
-        uintptr_t cur_pc = (start_addr + lir->offset + 4) & ~3;
-        uintptr_t target = lir->operands[1];
-
-        /* Match bit[1] in target with base */
-        if (cur_pc & 0x2) {
-          target |= 0x2;
-        }
-        int delta = target - cur_pc;
-        DCHECK((delta >= -(1<<22)) && (delta <= ((1<<22)-2)));
-
-        lir->operands[0] = (delta >> 12) & 0x7ff;
-        NEXT_LIR(lir)->operands[0] = (delta>> 1) & 0x7ff;
-      } else if (lir->opcode == kThumbBl1) {
-        DCHECK(NEXT_LIR(lir)->opcode == kThumbBl2);
-        /* Both cur_pc and target are Thumb */
-        uintptr_t cur_pc = start_addr + lir->offset + 4;
-        uintptr_t target = lir->operands[1];
-
-        int delta = target - cur_pc;
-        DCHECK((delta >= -(1<<22)) && (delta <= ((1<<22)-2)));
-
-        lir->operands[0] = (delta >> 12) & 0x7ff;
-        NEXT_LIR(lir)->operands[0] = (delta>> 1) & 0x7ff;
-      } else if (lir->opcode == kThumb2Adr) {
-        SwitchTable *tab_rec = reinterpret_cast<SwitchTable*>(lir->operands[2]);
-        LIR* target = lir->target;
-        int target_disp = tab_rec ? tab_rec->offset
-                    : target->offset;
-        int disp = target_disp - ((lir->offset + 4) & ~3);
-        if (disp < 4096) {
-          lir->operands[1] = disp;
-        } else {
-          // convert to ldimm16l, ldimm16h, add tgt, pc, operands[0]
-          // TUNING: if this case fires often, it can be improved.  Not expected to be common.
-          LIR *new_mov16L =
-              RawLIR(lir->dalvik_offset, kThumb2MovImm16LST,
-                     lir->operands[0], 0, reinterpret_cast<uintptr_t>(lir),
-                     reinterpret_cast<uintptr_t>(tab_rec), 0, lir->target);
-          InsertLIRBefore(lir, new_mov16L);
-          LIR *new_mov16H =
-              RawLIR(lir->dalvik_offset, kThumb2MovImm16HST,
-                     lir->operands[0], 0, reinterpret_cast<uintptr_t>(lir),
-                     reinterpret_cast<uintptr_t>(tab_rec), 0, lir->target);
-          InsertLIRBefore(lir, new_mov16H);
-          if (ARM_LOWREG(lir->operands[0])) {
-            lir->opcode = kThumbAddRRLH;
-          } else {
-            lir->opcode = kThumbAddRRHH;
-          }
-          lir->operands[1] = rARM_PC;
-          SetupResourceMasks(lir);
-          res = kRetryAll;
-        }
-      } else if (lir->opcode == kThumb2MovImm16LST) {
-        // operands[1] should hold disp, [2] has add, [3] has tab_rec
-        LIR *addPCInst = reinterpret_cast<LIR*>(lir->operands[2]);
-        SwitchTable *tab_rec = reinterpret_cast<SwitchTable*>(lir->operands[3]);
-        // If tab_rec is null, this is a literal load. Use target
-        LIR* target = lir->target;
-        int target_disp = tab_rec ? tab_rec->offset : target->offset;
-        lir->operands[1] = (target_disp - (addPCInst->offset + 4)) & 0xffff;
-      } else if (lir->opcode == kThumb2MovImm16HST) {
-        // operands[1] should hold disp, [2] has add, [3] has tab_rec
-        LIR *addPCInst = reinterpret_cast<LIR*>(lir->operands[2]);
-        SwitchTable *tab_rec = reinterpret_cast<SwitchTable*>(lir->operands[3]);
-        // If tab_rec is null, this is a literal load. Use target
-        LIR* target = lir->target;
-        int target_disp = tab_rec ? tab_rec->offset : target->offset;
-        lir->operands[1] =
-            ((target_disp - (addPCInst->offset + 4)) >> 16) & 0xffff;
-      }
-    }
-    /*
-     * If one of the pc-relative instructions expanded we'll have
-     * to make another pass.  Don't bother to fully assemble the
-     * instruction.
-     */
-    if (res != kSuccess) {
-      continue;
-    }
+  } else if (LIKELY(!lir->flags.is_nop)) {
     const ArmEncodingMap *encoder = &EncodingMap[lir->opcode];
     uint32_t bits = encoder->skeleton;
     int i;
@@ -1288,113 +1036,623 @@
       uint32_t operand;
       uint32_t value;
       operand = lir->operands[i];
-      switch (encoder->field_loc[i].kind) {
-        case kFmtUnused:
-          break;
-        case kFmtFPImm:
-          value = ((operand & 0xF0) >> 4) << encoder->field_loc[i].end;
-          value |= (operand & 0x0F) << encoder->field_loc[i].start;
-          bits |= value;
-          break;
-        case kFmtBrOffset:
-          value = ((operand  & 0x80000) >> 19) << 26;
-          value |= ((operand & 0x40000) >> 18) << 11;
-          value |= ((operand & 0x20000) >> 17) << 13;
-          value |= ((operand & 0x1f800) >> 11) << 16;
-          value |= (operand  & 0x007ff);
-          bits |= value;
-          break;
-        case kFmtShift5:
-          value = ((operand & 0x1c) >> 2) << 12;
-          value |= (operand & 0x03) << 6;
-          bits |= value;
-          break;
-        case kFmtShift:
-          value = ((operand & 0x70) >> 4) << 12;
-          value |= (operand & 0x0f) << 4;
-          bits |= value;
-          break;
-        case kFmtBWidth:
-          value = operand - 1;
-          bits |= value;
-          break;
-        case kFmtLsb:
-          value = ((operand & 0x1c) >> 2) << 12;
-          value |= (operand & 0x03) << 6;
-          bits |= value;
-          break;
-        case kFmtImm6:
-          value = ((operand & 0x20) >> 5) << 9;
-          value |= (operand & 0x1f) << 3;
-          bits |= value;
-          break;
-        case kFmtBitBlt:
-          value = (operand << encoder->field_loc[i].start) &
-              ((1 << (encoder->field_loc[i].end + 1)) - 1);
-          bits |= value;
-          break;
-        case kFmtDfp: {
-          DCHECK(ARM_DOUBLEREG(operand));
-          DCHECK_EQ((operand & 0x1), 0U);
-          int reg_name = (operand & ARM_FP_REG_MASK) >> 1;
-          /* Snag the 1-bit slice and position it */
-          value = ((reg_name & 0x10) >> 4) << encoder->field_loc[i].end;
-          /* Extract and position the 4-bit slice */
-          value |= (reg_name & 0x0f) << encoder->field_loc[i].start;
-          bits |= value;
-          break;
-        }
-        case kFmtSfp:
-          DCHECK(ARM_SINGLEREG(operand));
-          /* Snag the 1-bit slice and position it */
-          value = (operand & 0x1) << encoder->field_loc[i].end;
-          /* Extract and position the 4-bit slice */
-          value |= ((operand & 0x1e) >> 1) << encoder->field_loc[i].start;
-          bits |= value;
-          break;
-        case kFmtImm12:
-        case kFmtModImm:
-          value = ((operand & 0x800) >> 11) << 26;
-          value |= ((operand & 0x700) >> 8) << 12;
-          value |= operand & 0x0ff;
-          bits |= value;
-          break;
-        case kFmtImm16:
-          value = ((operand & 0x0800) >> 11) << 26;
-          value |= ((operand & 0xf000) >> 12) << 16;
-          value |= ((operand & 0x0700) >> 8) << 12;
-          value |= operand & 0x0ff;
-          bits |= value;
-          break;
-        case kFmtOff24: {
-          uint32_t signbit = (operand >> 31) & 0x1;
-          uint32_t i1 = (operand >> 22) & 0x1;
-          uint32_t i2 = (operand >> 21) & 0x1;
-          uint32_t imm10 = (operand >> 11) & 0x03ff;
-          uint32_t imm11 = operand & 0x07ff;
-          uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
-          uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
-          value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
-              imm11;
-          bits |= value;
+      ArmEncodingKind kind = encoder->field_loc[i].kind;
+      if (LIKELY(kind == kFmtBitBlt)) {
+        value = (operand << encoder->field_loc[i].start) &
+            ((1 << (encoder->field_loc[i].end + 1)) - 1);
+        bits |= value;
+      } else {
+        switch (encoder->field_loc[i].kind) {
+          case kFmtSkip:
+            break;  // Nothing to do, but continue to next.
+          case kFmtUnused:
+            i = 4;  // Done, break out of the enclosing loop.
+            break;
+          case kFmtFPImm:
+            value = ((operand & 0xF0) >> 4) << encoder->field_loc[i].end;
+            value |= (operand & 0x0F) << encoder->field_loc[i].start;
+            bits |= value;
+            break;
+          case kFmtBrOffset:
+            value = ((operand  & 0x80000) >> 19) << 26;
+            value |= ((operand & 0x40000) >> 18) << 11;
+            value |= ((operand & 0x20000) >> 17) << 13;
+            value |= ((operand & 0x1f800) >> 11) << 16;
+            value |= (operand  & 0x007ff);
+            bits |= value;
+            break;
+          case kFmtShift5:
+            value = ((operand & 0x1c) >> 2) << 12;
+            value |= (operand & 0x03) << 6;
+            bits |= value;
+            break;
+          case kFmtShift:
+            value = ((operand & 0x70) >> 4) << 12;
+            value |= (operand & 0x0f) << 4;
+            bits |= value;
+            break;
+          case kFmtBWidth:
+            value = operand - 1;
+            bits |= value;
+            break;
+          case kFmtLsb:
+            value = ((operand & 0x1c) >> 2) << 12;
+            value |= (operand & 0x03) << 6;
+            bits |= value;
+            break;
+          case kFmtImm6:
+            value = ((operand & 0x20) >> 5) << 9;
+            value |= (operand & 0x1f) << 3;
+            bits |= value;
+            break;
+          case kFmtDfp: {
+            DCHECK(ARM_DOUBLEREG(operand));
+            DCHECK_EQ((operand & 0x1), 0U);
+            int reg_name = (operand & ARM_FP_REG_MASK) >> 1;
+            /* Snag the 1-bit slice and position it */
+            value = ((reg_name & 0x10) >> 4) << encoder->field_loc[i].end;
+            /* Extract and position the 4-bit slice */
+            value |= (reg_name & 0x0f) << encoder->field_loc[i].start;
+            bits |= value;
+            break;
           }
-          break;
-        default:
-          LOG(FATAL) << "Bad fmt:" << encoder->field_loc[i].kind;
+          case kFmtSfp:
+            DCHECK(ARM_SINGLEREG(operand));
+            /* Snag the 1-bit slice and position it */
+            value = (operand & 0x1) << encoder->field_loc[i].end;
+            /* Extract and position the 4-bit slice */
+            value |= ((operand & 0x1e) >> 1) << encoder->field_loc[i].start;
+            bits |= value;
+            break;
+          case kFmtImm12:
+          case kFmtModImm:
+            value = ((operand & 0x800) >> 11) << 26;
+            value |= ((operand & 0x700) >> 8) << 12;
+            value |= operand & 0x0ff;
+            bits |= value;
+            break;
+          case kFmtImm16:
+            value = ((operand & 0x0800) >> 11) << 26;
+            value |= ((operand & 0xf000) >> 12) << 16;
+            value |= ((operand & 0x0700) >> 8) << 12;
+            value |= operand & 0x0ff;
+            bits |= value;
+            break;
+          case kFmtOff24: {
+            uint32_t signbit = (operand >> 31) & 0x1;
+            uint32_t i1 = (operand >> 22) & 0x1;
+            uint32_t i2 = (operand >> 21) & 0x1;
+            uint32_t imm10 = (operand >> 11) & 0x03ff;
+            uint32_t imm11 = operand & 0x07ff;
+            uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
+            uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
+            value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
+                imm11;
+            bits |= value;
+            }
+            break;
+          default:
+            LOG(FATAL) << "Bad fmt:" << encoder->field_loc[i].kind;
+        }
       }
     }
     if (encoder->size == 4) {
-      code_buffer_.push_back((bits >> 16) & 0xff);
-      code_buffer_.push_back((bits >> 24) & 0xff);
+      lir->u.a.bytes[0] = ((bits >> 16) & 0xff);
+      lir->u.a.bytes[1] = ((bits >> 24) & 0xff);
+      lir->u.a.bytes[2] = (bits & 0xff);
+      lir->u.a.bytes[3] = ((bits >> 8) & 0xff);
+    } else {
+      DCHECK_EQ(encoder->size, 2);
+      lir->u.a.bytes[0] = (bits & 0xff);
+      lir->u.a.bytes[1] = ((bits >> 8) & 0xff);
     }
-    code_buffer_.push_back(bits & 0xff);
-    code_buffer_.push_back((bits >> 8) & 0xff);
+    lir->flags.size = encoder->size;
   }
-  return res;
+}
+
+// Assemble the LIR into binary instruction format.
+void ArmMir2Lir::AssembleLIR() {
+  LIR* lir;
+  LIR* prev_lir;
+  int assembler_retries = 0;
+  int starting_offset = EncodeRange(first_lir_insn_, last_lir_insn_, 0);
+  data_offset_ = (starting_offset + 0x3) & ~0x3;
+  int offset_adjustment;
+  AssignDataOffsets();
+
+  /*
+   * Note: generation must be 1 on first pass (to distinguish from initialized state of 0 for non-visited nodes).
+   * Start at zero here, and bit will be flipped to 1 on entry to the loop.
+   */
+  int generation = 0;
+  while (true) {
+    offset_adjustment = 0;
+    AssemblerStatus res = kSuccess;  // Assume success
+    generation ^= 1;
+    // Note: nodes requiring possible fixup are linked in ascending order.
+    lir = first_fixup_;
+    prev_lir = NULL;
+    while (lir != NULL) {
+      /*
+       * NOTE: the lir being considered here will be encoded following the switch (so long as
+       * we're not in a retry situation).  However, any new non-pc_rel instructions inserted
+       * due to retry must be explicitly encoded at the time of insertion.  Note that
+       * inserted instructions don't need use/def flags, but do need size and pc-rel status
+       * properly updated.
+       */
+      lir->offset += offset_adjustment;
+      // During a pass, this lets us tell whether a node's offset already includes offset_adjustment.
+      lir->flags.generation = generation;
+      switch (static_cast<FixupKind>(lir->flags.fixup)) {
+        case kFixupLabel:
+        case kFixupNone:
+          break;
+        case kFixupVLoad:
+          if (lir->operands[1] != r15pc) {
+            break;
+          }
+          // NOTE: intentional fallthrough.
+        case kFixupLoad: {
+          /*
+           * PC-relative loads are mostly used to load immediates
+           * that are too large to materialize directly in one shot.
+           * However, if the load displacement exceeds the limit,
+           * we revert to a multiple-instruction materialization sequence.
+           */
+          LIR *lir_target = lir->target;
+          uintptr_t pc = (lir->offset + 4) & ~3;
+          uintptr_t target = lir_target->offset +
+              ((lir_target->flags.generation == lir->flags.generation) ? 0 : offset_adjustment);
+          int delta = target - pc;
+          if (res != kSuccess) {
+            /*
+             * In this case, we're just estimating and will do it again for real.  Ensure offset
+             * is legal.
+             */
+            delta &= ~0x3;
+          }
+          DCHECK_EQ((delta & 0x3), 0);
+          // First, a sanity check for cases we shouldn't see now
+          if (kIsDebugBuild && (((lir->opcode == kThumbAddPcRel) && (delta > 1020)) ||
+              ((lir->opcode == kThumbLdrPcRel) && (delta > 1020)))) {
+            // Shouldn't happen in current codegen.
+            LOG(FATAL) << "Unexpected pc-rel offset " << delta;
+          }
+          // Now, check for the difficult cases
+          if (((lir->opcode == kThumb2LdrPcRel12) && (delta > 4091)) ||
+              ((lir->opcode == kThumb2LdrdPcRel8) && (delta > 1020)) ||
+              ((lir->opcode == kThumb2Vldrs) && (delta > 1020)) ||
+              ((lir->opcode == kThumb2Vldrd) && (delta > 1020))) {
+            /*
+             * Note: The reason vldrs/vldrd include rARM_LR in their use/def masks is that we
+             * sometimes have to use it to fix up out-of-range accesses.  This is where that
+             * happens.
+             */
+            int base_reg = ((lir->opcode == kThumb2LdrdPcRel8) ||
+                            (lir->opcode == kThumb2LdrPcRel12)) ?  lir->operands[0] : rARM_LR;
+
+            // Add new Adr to generate the address.
+            LIR* new_adr = RawLIR(lir->dalvik_offset, kThumb2Adr,
+                       base_reg, 0, 0, 0, 0, lir->target);
+            new_adr->offset = lir->offset;
+            new_adr->flags.fixup = kFixupAdr;
+            new_adr->flags.size = EncodingMap[kThumb2Adr].size;
+            InsertLIRBefore(lir, new_adr);
+            lir->offset += new_adr->flags.size;
+            offset_adjustment += new_adr->flags.size;
+
+            // lir no longer pcrel, unlink and link in new_adr.
+            ReplaceFixup(prev_lir, lir, new_adr);
+
+            // Convert to normal load.
+            offset_adjustment -= lir->flags.size;
+            if (lir->opcode == kThumb2LdrPcRel12) {
+              lir->opcode = kThumb2LdrRRI12;
+            } else if (lir->opcode == kThumb2LdrdPcRel8) {
+              lir->opcode = kThumb2LdrdI8;
+            }
+            lir->flags.size = EncodingMap[lir->opcode].size;
+            offset_adjustment += lir->flags.size;
+            // Change the load to be relative to the new Adr base.
+            if (lir->opcode == kThumb2LdrdI8) {
+              lir->operands[3] = 0;
+              lir->operands[2] = base_reg;
+            } else {
+              lir->operands[2] = 0;
+              lir->operands[1] = base_reg;
+            }
+            // Must redo encoding here - won't ever revisit this node.
+            EncodeLIR(lir);
+            prev_lir = new_adr;  // Continue scan with new_adr;
+            lir = new_adr->u.a.pcrel_next;
+            res = kRetryAll;
+            continue;
+          } else {
+            if ((lir->opcode == kThumb2Vldrs) ||
+                (lir->opcode == kThumb2Vldrd) ||
+                (lir->opcode == kThumb2LdrdPcRel8)) {
+              lir->operands[2] = delta >> 2;
+            } else {
+              lir->operands[1] = (lir->opcode == kThumb2LdrPcRel12) ?  delta :
+                  delta >> 2;
+            }
+          }
+          break;
+        }
+        case kFixupCBxZ: {
+          LIR *target_lir = lir->target;
+          uintptr_t pc = lir->offset + 4;
+          uintptr_t target = target_lir->offset +
+              ((target_lir->flags.generation == lir->flags.generation) ? 0 : offset_adjustment);
+          int delta = target - pc;
+          if (delta > 126 || delta < 0) {
+            /*
+             * Convert to cmp rx,#0 / b[eq/ne] tgt pair
+             * Make new branch instruction and insert after
+             */
+            LIR* new_inst =
+              RawLIR(lir->dalvik_offset, kThumbBCond, 0,
+                     (lir->opcode == kThumb2Cbz) ? kArmCondEq : kArmCondNe,
+                     0, 0, 0, lir->target);
+            InsertLIRAfter(lir, new_inst);
+
+            /* Convert the cb[n]z to a cmp rx, #0 */
+            // Subtract the old size.
+            offset_adjustment -= lir->flags.size;
+            lir->opcode = kThumbCmpRI8;
+            /* operand[0] is src1 in both cb[n]z & CmpRI8 */
+            lir->operands[1] = 0;
+            lir->target = 0;
+            EncodeLIR(lir);   // NOTE: sets flags.size.
+            // Add back the new size.
+            DCHECK_EQ(lir->flags.size, static_cast<uint32_t>(EncodingMap[lir->opcode].size));
+            offset_adjustment += lir->flags.size;
+            // Set up the new following inst.
+            new_inst->offset = lir->offset + lir->flags.size;
+            new_inst->flags.fixup = kFixupCondBranch;
+            new_inst->flags.size = EncodingMap[new_inst->opcode].size;
+            offset_adjustment += new_inst->flags.size;
+
+            // lir no longer pcrel, unlink and link in new_inst.
+            ReplaceFixup(prev_lir, lir, new_inst);
+            prev_lir = new_inst;  // Continue with the new instruction.
+            lir = new_inst->u.a.pcrel_next;
+            res = kRetryAll;
+            continue;
+          } else {
+            lir->operands[1] = delta >> 1;
+          }
+          break;
+        }
+        case kFixupPushPop: {
+          if (__builtin_popcount(lir->operands[0]) == 1) {
+            /*
+             * The standard push/pop multiple instruction
+             * requires at least two registers in the list.
+             * If we've got just one, switch to the single-reg
+             * encoding.
+             */
+            lir->opcode = (lir->opcode == kThumb2Push) ? kThumb2Push1 :
+                kThumb2Pop1;
+            int reg = 0;
+            while (lir->operands[0]) {
+              if (lir->operands[0] & 0x1) {
+                break;
+              } else {
+                reg++;
+                lir->operands[0] >>= 1;
+              }
+            }
+            lir->operands[0] = reg;
+            // This won't change again, don't bother unlinking, just reset fixup kind
+            lir->flags.fixup = kFixupNone;
+          }
+          break;
+        }
+        case kFixupCondBranch: {
+          LIR *target_lir = lir->target;
+          int delta = 0;
+          DCHECK(target_lir);
+          uintptr_t pc = lir->offset + 4;
+          uintptr_t target = target_lir->offset +
+              ((target_lir->flags.generation == lir->flags.generation) ? 0 : offset_adjustment);
+          delta = target - pc;
+          if ((lir->opcode == kThumbBCond) && (delta > 254 || delta < -256)) {
+            offset_adjustment -= lir->flags.size;
+            lir->opcode = kThumb2BCond;
+            lir->flags.size = EncodingMap[lir->opcode].size;
+            // Fixup kind remains the same.
+            offset_adjustment += lir->flags.size;
+            res = kRetryAll;
+          }
+          lir->operands[0] = delta >> 1;
+          break;
+        }
+        case kFixupT2Branch: {
+          LIR *target_lir = lir->target;
+          uintptr_t pc = lir->offset + 4;
+          uintptr_t target = target_lir->offset +
+              ((target_lir->flags.generation == lir->flags.generation) ? 0 : offset_adjustment);
+          int delta = target - pc;
+          lir->operands[0] = delta >> 1;
+          if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && lir->operands[0] == 0) {
+            // Useless branch
+            offset_adjustment -= lir->flags.size;
+            lir->flags.is_nop = true;
+            // Don't unlink - just set to do-nothing.
+            lir->flags.fixup = kFixupNone;
+            res = kRetryAll;
+          }
+          break;
+        }
+        case kFixupT1Branch: {
+          LIR *target_lir = lir->target;
+          uintptr_t pc = lir->offset + 4;
+          uintptr_t target = target_lir->offset +
+              ((target_lir->flags.generation == lir->flags.generation) ? 0 : offset_adjustment);
+          int delta = target - pc;
+          if (delta > 2046 || delta < -2048) {
+            // Out of range for 16-bit branch: convert to kThumb2BUncond.
+            offset_adjustment -= lir->flags.size;
+            lir->opcode = kThumb2BUncond;
+            lir->operands[0] = 0;
+            lir->flags.size = EncodingMap[lir->opcode].size;
+            lir->flags.fixup = kFixupT2Branch;
+            offset_adjustment += lir->flags.size;
+            res = kRetryAll;
+          } else {
+            lir->operands[0] = delta >> 1;
+            if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && lir->operands[0] == -1) {
+              // Useless branch
+              offset_adjustment -= lir->flags.size;
+              lir->flags.is_nop = true;
+              // Don't unlink - just set to do-nothing.
+              lir->flags.fixup = kFixupNone;
+              res = kRetryAll;
+            }
+          }
+          break;
+        }
+        case kFixupBlx1: {
+          DCHECK(NEXT_LIR(lir)->opcode == kThumbBlx2);
+          /* cur_pc is Thumb */
+          uintptr_t cur_pc = (lir->offset + 4) & ~3;
+          uintptr_t target = lir->operands[1];
+
+          /* Match bit[1] in target with base */
+          if (cur_pc & 0x2) {
+            target |= 0x2;
+          }
+          int delta = target - cur_pc;
+          DCHECK((delta >= -(1<<22)) && (delta <= ((1<<22)-2)));
+
+          lir->operands[0] = (delta >> 12) & 0x7ff;
+          NEXT_LIR(lir)->operands[0] = (delta>> 1) & 0x7ff;
+          break;
+        }
+        case kFixupBl1: {
+          DCHECK(NEXT_LIR(lir)->opcode == kThumbBl2);
+          /* Both cur_pc and target are Thumb */
+          uintptr_t cur_pc = lir->offset + 4;
+          uintptr_t target = lir->operands[1];
+
+          int delta = target - cur_pc;
+          DCHECK((delta >= -(1<<22)) && (delta <= ((1<<22)-2)));
+
+          lir->operands[0] = (delta >> 12) & 0x7ff;
+          NEXT_LIR(lir)->operands[0] = (delta>> 1) & 0x7ff;
+          break;
+        }
+        case kFixupAdr: {
+          SwitchTable *tab_rec = reinterpret_cast<SwitchTable*>(lir->operands[2]);
+          LIR* target = lir->target;
+          int target_disp = (tab_rec != NULL) ?  tab_rec->offset + offset_adjustment
+              : target->offset + ((target->flags.generation == lir->flags.generation) ? 0 : offset_adjustment);
+          int disp = target_disp - ((lir->offset + 4) & ~3);
+          if (disp < 4096) {
+            lir->operands[1] = disp;
+          } else {
+            // convert to mov16 low/high (kThumb2MovImm16LST/HST) + add tgt, pc
+            // TUNING: if this case fires often, it can be improved.  Not expected to be common.
+            LIR *new_mov16L =
+                RawLIR(lir->dalvik_offset, kThumb2MovImm16LST,
+                       lir->operands[0], 0, reinterpret_cast<uintptr_t>(lir),
+                       reinterpret_cast<uintptr_t>(tab_rec), 0, lir->target);
+            new_mov16L->flags.size = EncodingMap[new_mov16L->opcode].size;
+            new_mov16L->flags.fixup = kFixupMovImmLST;
+            new_mov16L->offset = lir->offset;
+            // Link the new instruction, retaining lir.
+            InsertLIRBefore(lir, new_mov16L);
+            lir->offset += new_mov16L->flags.size;
+            offset_adjustment += new_mov16L->flags.size;
+            InsertFixupBefore(prev_lir, lir, new_mov16L);
+            prev_lir = new_mov16L;   // Now we've got a new prev.
+
+            LIR *new_mov16H =
+                RawLIR(lir->dalvik_offset, kThumb2MovImm16HST,
+                       lir->operands[0], 0, reinterpret_cast<uintptr_t>(lir),
+                       reinterpret_cast<uintptr_t>(tab_rec), 0, lir->target);
+            new_mov16H->flags.size = EncodingMap[new_mov16H->opcode].size;
+            new_mov16H->flags.fixup = kFixupMovImmHST;
+            new_mov16H->offset = lir->offset;
+            // Link the new instruction, retaining lir.
+            InsertLIRBefore(lir, new_mov16H);
+            lir->offset += new_mov16H->flags.size;
+            offset_adjustment += new_mov16H->flags.size;
+            InsertFixupBefore(prev_lir, lir, new_mov16H);
+            prev_lir = new_mov16H;  // Now we've got a new prev.
+
+            offset_adjustment -= lir->flags.size;
+            if (ARM_LOWREG(lir->operands[0])) {
+              lir->opcode = kThumbAddRRLH;
+            } else {
+              lir->opcode = kThumbAddRRHH;
+            }
+            lir->operands[1] = rARM_PC;
+            lir->flags.size = EncodingMap[lir->opcode].size;
+            offset_adjustment += lir->flags.size;
+            // Must stay in fixup list and have offset updated; will be used by LST/HST pair.
+            lir->flags.fixup = kFixupNone;
+            res = kRetryAll;
+          }
+          break;
+        }
+        case kFixupMovImmLST: {
+          // operands[1] should hold disp, [2] has add, [3] has tab_rec
+          LIR *addPCInst = reinterpret_cast<LIR*>(lir->operands[2]);
+          SwitchTable *tab_rec = reinterpret_cast<SwitchTable*>(lir->operands[3]);
+          // If tab_rec is null, this is a literal load. Use target
+          LIR* target = lir->target;
+          int target_disp = tab_rec ? tab_rec->offset : target->offset;
+          lir->operands[1] = (target_disp - (addPCInst->offset + 4)) & 0xffff;
+          break;
+        }
+        case kFixupMovImmHST: {
+          // operands[1] should hold disp, [2] has add, [3] has tab_rec
+          LIR *addPCInst = reinterpret_cast<LIR*>(lir->operands[2]);
+          SwitchTable *tab_rec = reinterpret_cast<SwitchTable*>(lir->operands[3]);
+          // If tab_rec is null, this is a literal load. Use target
+          LIR* target = lir->target;
+          int target_disp = tab_rec ? tab_rec->offset : target->offset;
+          lir->operands[1] =
+              ((target_disp - (addPCInst->offset + 4)) >> 16) & 0xffff;
+          break;
+        }
+        case kFixupAlign4: {
+          int required_size = lir->offset & 0x2;
+          if (lir->flags.size != required_size) {
+            offset_adjustment += required_size - lir->flags.size;
+            lir->flags.size = required_size;
+            res = kRetryAll;
+          }
+          break;
+        }
+        default:
+          LOG(FATAL) << "Unexpected case " << lir->flags.fixup;
+      }
+      /*
+       * If one of the pc-relative instructions expanded we'll have
+       * to make another pass.  Don't bother to fully assemble the
+       * instruction.
+       */
+      if (res == kSuccess) {
+        EncodeLIR(lir);
+        if (assembler_retries == 0) {
+          // Go ahead and fix up the code buffer image.
+          for (int i = 0; i < lir->flags.size; i++) {
+            code_buffer_[lir->offset + i] = lir->u.a.bytes[i];
+          }
+        }
+      }
+      prev_lir = lir;
+      lir = lir->u.a.pcrel_next;
+    }
+
+    if (res == kSuccess) {
+      break;
+    } else {
+      assembler_retries++;
+      if (assembler_retries > MAX_ASSEMBLER_RETRIES) {
+        CodegenDump();
+        LOG(FATAL) << "Assembler error - too many retries";
+      }
+      starting_offset += offset_adjustment;
+      data_offset_ = (starting_offset + 0x3) & ~0x3;
+      AssignDataOffsets();
+    }
+  }
+
+  // Rebuild the CodeBuffer if we had to retry; otherwise it should be good as-is.
+  if (assembler_retries != 0) {
+    code_buffer_.clear();
+    for (LIR* lir = first_lir_insn_; lir != NULL; lir = NEXT_LIR(lir)) {
+      if (lir->flags.is_nop) {
+        continue;
+      } else  {
+        for (int i = 0; i < lir->flags.size; i++) {
+          code_buffer_.push_back(lir->u.a.bytes[i]);
+        }
+      }
+    }
+  }
+
+  data_offset_ = (code_buffer_.size() + 0x3) & ~0x3;
+
+  // Install literals
+  InstallLiteralPools();
+
+  // Install switch tables
+  InstallSwitchTables();
+
+  // Install fill array data
+  InstallFillArrayData();
+
+  // Create the mapping table and native offset to reference map.
+  CreateMappingTables();
+
+  CreateNativeGcMap();
 }
 
 int ArmMir2Lir::GetInsnSize(LIR* lir) {
   return EncodingMap[lir->opcode].size;
 }
 
+// Encode instruction bit pattern and assign offsets.
+uint32_t ArmMir2Lir::EncodeRange(LIR* head_lir, LIR* tail_lir, uint32_t offset) {
+  LIR* end_lir = tail_lir->next;
+
+  /*
+   * A significant percentage of methods can be assembled in a single pass.  We'll
+   * go ahead and build the code image here, leaving holes for pc-relative fixup
+   * codes.  If the code size changes during that pass, we'll have to throw away
+   * this work - but if not, we're ready to go.
+   */
+  code_buffer_.reserve(estimated_native_code_size_ + 256);  // Add a little slop.
+  LIR* last_fixup = NULL;
+  for (LIR* lir = head_lir; lir != end_lir; lir = NEXT_LIR(lir)) {
+    lir->offset = offset;
+    if (!lir->flags.is_nop) {
+      if (lir->flags.fixup != kFixupNone) {
+        if (lir->opcode >= 0) {
+          lir->flags.size = EncodingMap[lir->opcode].size;
+          lir->flags.fixup = EncodingMap[lir->opcode].fixup;
+        } else if (UNLIKELY(lir->opcode == kPseudoPseudoAlign4)) {
+          lir->flags.size = (offset & 0x2);
+          lir->flags.fixup = kFixupAlign4;
+        } else {
+          lir->flags.size = 0;
+          lir->flags.fixup = kFixupLabel;
+        }
+        // Link into the fixup chain.
+        lir->flags.use_def_invalid = true;
+        lir->u.a.pcrel_next = NULL;
+        if (first_fixup_ == NULL) {
+          first_fixup_ = lir;
+        } else {
+          last_fixup->u.a.pcrel_next = lir;
+        }
+        last_fixup = lir;
+      } else {
+        EncodeLIR(lir);
+      }
+      for (int i = 0; i < lir->flags.size; i++) {
+        code_buffer_.push_back(lir->u.a.bytes[i]);
+      }
+      offset += lir->flags.size;
+    }
+  }
+  return offset;
+}
+
+void ArmMir2Lir::AssignDataOffsets() {
+  /* Set up offsets for literals */
+  int offset = data_offset_;
+
+  offset = AssignLiteralOffset(offset);
+
+  offset = AssignSwitchTablesOffset(offset);
+
+  total_size_ = AssignFillArrayDataOffset(offset);
+}
+
 }  // namespace art
diff --git a/compiler/dex/quick/arm/codegen_arm.h b/compiler/dex/quick/arm/codegen_arm.h
index 1954fba..b75661c 100644
--- a/compiler/dex/quick/arm/codegen_arm.h
+++ b/compiler/dex/quick/arm/codegen_arm.h
@@ -70,9 +70,14 @@
     void CompilerInitializeRegAlloc();
 
     // Required for target - miscellaneous.
+    void AssembleLIR();
+    uint32_t EncodeRange(LIR* head_lir, LIR* tail_lir, uint32_t starting_offset);
+    int AssignInsnOffsets();
+    void AssignOffsets();
     AssemblerStatus AssembleInstructions(uintptr_t start_addr);
+    void EncodeLIR(LIR* lir);
     void DumpResourceMask(LIR* lir, uint64_t mask, const char* prefix);
-    void SetupTargetResourceMasks(LIR* lir);
+    void SetupTargetResourceMasks(LIR* lir, uint64_t flags);
     const char* GetTargetInstFmt(int opcode);
     const char* GetTargetInstName(int opcode);
     std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
@@ -187,6 +192,9 @@
     MIR* SpecialIdentity(MIR* mir);
     LIR* LoadFPConstantValue(int r_dest, int value);
     bool BadOverlap(RegLocation rl_src, RegLocation rl_dest);
+    void ReplaceFixup(LIR* prev_lir, LIR* orig_lir, LIR* new_lir);
+    void InsertFixupBefore(LIR* prev_lir, LIR* orig_lir, LIR* new_lir);
+    void AssignDataOffsets();
 };
 
 }  // namespace art
diff --git a/compiler/dex/quick/arm/int_arm.cc b/compiler/dex/quick/arm/int_arm.cc
index 07782d9..9b0fa62 100644
--- a/compiler/dex/quick/arm/int_arm.cc
+++ b/compiler/dex/quick/arm/int_arm.cc
@@ -319,7 +319,18 @@
   LIR* branch;
   int mod_imm;
   ArmConditionCode arm_cond = ArmConditionEncoding(cond);
-  if ((ARM_LOWREG(reg)) && (check_value == 0) &&
+  /*
+   * A common use of OpCmpImmBranch is for null checks, and using the Thumb 16-bit
+   * compare-and-branch if zero is ideal if it will reach.  However, because null checks
+   * branch forward to a launch pad, they will frequently not reach - and thus have to
+   * be converted to a long form during assembly (which will trigger another assembly
+   * pass).  Here we estimate the branch distance for checks, and if large directly
+   * generate the long form in an attempt to avoid an extra assembly pass.
+   * TODO: consider interspersing launchpads in code following unconditional branches.
+   */
+  bool skip = ((target != NULL) && (target->opcode == kPseudoThrowTarget));
+  skip &= ((cu_->code_item->insns_size_in_code_units_ - current_dalvik_offset_) > 64);
+  if (!skip && (ARM_LOWREG(reg)) && (check_value == 0) &&
      ((arm_cond == kArmCondEq) || (arm_cond == kArmCondNe))) {
     branch = NewLIR2((arm_cond == kArmCondEq) ? kThumb2Cbz : kThumb2Cbnz,
                      reg, 0);
@@ -624,7 +635,7 @@
       break;
   }
   LIR* dmb = NewLIR1(kThumb2Dmb, dmb_flavor);
-  dmb->def_mask = ENCODE_ALL;
+  dmb->u.m.def_mask = ENCODE_ALL;
 #endif
 }
 
diff --git a/compiler/dex/quick/arm/target_arm.cc b/compiler/dex/quick/arm/target_arm.cc
index 203a8cc..a4ea10b 100644
--- a/compiler/dex/quick/arm/target_arm.cc
+++ b/compiler/dex/quick/arm/target_arm.cc
@@ -118,78 +118,83 @@
   return ENCODE_ARM_REG_PC;
 }
 
-void ArmMir2Lir::SetupTargetResourceMasks(LIR* lir) {
+// Thumb2 specific setup.  TODO: inline?
+void ArmMir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags) {
   DCHECK_EQ(cu_->instruction_set, kThumb2);
+  DCHECK(!lir->flags.use_def_invalid);
 
-  // Thumb2 specific setup
-  uint64_t flags = ArmMir2Lir::EncodingMap[lir->opcode].flags;
   int opcode = lir->opcode;
 
-  if (flags & REG_DEF_SP) {
-    lir->def_mask |= ENCODE_ARM_REG_SP;
-  }
-
-  if (flags & REG_USE_SP) {
-    lir->use_mask |= ENCODE_ARM_REG_SP;
-  }
-
-  if (flags & REG_DEF_LIST0) {
-    lir->def_mask |= ENCODE_ARM_REG_LIST(lir->operands[0]);
-  }
-
-  if (flags & REG_DEF_LIST1) {
-    lir->def_mask |= ENCODE_ARM_REG_LIST(lir->operands[1]);
-  }
-
-  if (flags & REG_DEF_FPCS_LIST0) {
-    lir->def_mask |= ENCODE_ARM_REG_FPCS_LIST(lir->operands[0]);
-  }
-
-  if (flags & REG_DEF_FPCS_LIST2) {
-    for (int i = 0; i < lir->operands[2]; i++) {
-      SetupRegMask(&lir->def_mask, lir->operands[1] + i);
+  // These flags are somewhat uncommon - bypass if we can.
+  if ((flags & (REG_DEF_SP | REG_USE_SP | REG_DEF_LIST0 | REG_DEF_LIST1 |
+                REG_DEF_FPCS_LIST0 | REG_DEF_FPCS_LIST2 | REG_USE_PC | IS_IT | REG_USE_LIST0 |
+                REG_USE_LIST1 | REG_USE_FPCS_LIST0 | REG_USE_FPCS_LIST2 | REG_DEF_LR)) != 0) {
+    if (flags & REG_DEF_SP) {
+      lir->u.m.def_mask |= ENCODE_ARM_REG_SP;
     }
-  }
 
-  if (flags & REG_USE_PC) {
-    lir->use_mask |= ENCODE_ARM_REG_PC;
-  }
-
-  /* Conservatively treat the IT block */
-  if (flags & IS_IT) {
-    lir->def_mask = ENCODE_ALL;
-  }
-
-  if (flags & REG_USE_LIST0) {
-    lir->use_mask |= ENCODE_ARM_REG_LIST(lir->operands[0]);
-  }
-
-  if (flags & REG_USE_LIST1) {
-    lir->use_mask |= ENCODE_ARM_REG_LIST(lir->operands[1]);
-  }
-
-  if (flags & REG_USE_FPCS_LIST0) {
-    lir->use_mask |= ENCODE_ARM_REG_FPCS_LIST(lir->operands[0]);
-  }
-
-  if (flags & REG_USE_FPCS_LIST2) {
-    for (int i = 0; i < lir->operands[2]; i++) {
-      SetupRegMask(&lir->use_mask, lir->operands[1] + i);
+    if (flags & REG_USE_SP) {
+      lir->u.m.use_mask |= ENCODE_ARM_REG_SP;
     }
-  }
-  /* Fixup for kThumbPush/lr and kThumbPop/pc */
-  if (opcode == kThumbPush || opcode == kThumbPop) {
-    uint64_t r8Mask = GetRegMaskCommon(r8);
-    if ((opcode == kThumbPush) && (lir->use_mask & r8Mask)) {
-      lir->use_mask &= ~r8Mask;
-      lir->use_mask |= ENCODE_ARM_REG_LR;
-    } else if ((opcode == kThumbPop) && (lir->def_mask & r8Mask)) {
-      lir->def_mask &= ~r8Mask;
-      lir->def_mask |= ENCODE_ARM_REG_PC;
+
+    if (flags & REG_DEF_LIST0) {
+      lir->u.m.def_mask |= ENCODE_ARM_REG_LIST(lir->operands[0]);
     }
-  }
-  if (flags & REG_DEF_LR) {
-    lir->def_mask |= ENCODE_ARM_REG_LR;
+
+    if (flags & REG_DEF_LIST1) {
+      lir->u.m.def_mask |= ENCODE_ARM_REG_LIST(lir->operands[1]);
+    }
+
+    if (flags & REG_DEF_FPCS_LIST0) {
+      lir->u.m.def_mask |= ENCODE_ARM_REG_FPCS_LIST(lir->operands[0]);
+    }
+
+    if (flags & REG_DEF_FPCS_LIST2) {
+      for (int i = 0; i < lir->operands[2]; i++) {
+        SetupRegMask(&lir->u.m.def_mask, lir->operands[1] + i);
+      }
+    }
+
+    if (flags & REG_USE_PC) {
+      lir->u.m.use_mask |= ENCODE_ARM_REG_PC;
+    }
+
+    /* Conservatively treat the IT block */
+    if (flags & IS_IT) {
+      lir->u.m.def_mask = ENCODE_ALL;
+    }
+
+    if (flags & REG_USE_LIST0) {
+      lir->u.m.use_mask |= ENCODE_ARM_REG_LIST(lir->operands[0]);
+    }
+
+    if (flags & REG_USE_LIST1) {
+      lir->u.m.use_mask |= ENCODE_ARM_REG_LIST(lir->operands[1]);
+    }
+
+    if (flags & REG_USE_FPCS_LIST0) {
+      lir->u.m.use_mask |= ENCODE_ARM_REG_FPCS_LIST(lir->operands[0]);
+    }
+
+    if (flags & REG_USE_FPCS_LIST2) {
+      for (int i = 0; i < lir->operands[2]; i++) {
+        SetupRegMask(&lir->u.m.use_mask, lir->operands[1] + i);
+      }
+    }
+    /* Fixup for kThumbPush/lr and kThumbPop/pc */
+    if (opcode == kThumbPush || opcode == kThumbPop) {
+      uint64_t r8Mask = GetRegMaskCommon(r8);
+      if ((opcode == kThumbPush) && (lir->u.m.use_mask & r8Mask)) {
+        lir->u.m.use_mask &= ~r8Mask;
+        lir->u.m.use_mask |= ENCODE_ARM_REG_LR;
+      } else if ((opcode == kThumbPop) && (lir->u.m.def_mask & r8Mask)) {
+        lir->u.m.def_mask &= ~r8Mask;
+        lir->u.m.def_mask |= ENCODE_ARM_REG_PC;
+      }
+    }
+    if (flags & REG_DEF_LR) {
+      lir->u.m.def_mask |= ENCODE_ARM_REG_LR;
+    }
   }
 }
 
@@ -466,8 +471,8 @@
 
     /* Memory bits */
     if (arm_lir && (mask & ENCODE_DALVIK_REG)) {
-      sprintf(buf + strlen(buf), "dr%d%s", arm_lir->alias_info & 0xffff,
-              (arm_lir->alias_info & 0x80000000) ? "(+1)" : "");
+      sprintf(buf + strlen(buf), "dr%d%s", DECODE_ALIAS_INFO_REG(arm_lir->flags.alias_info),
+              DECODE_ALIAS_INFO_WIDE(arm_lir->flags.alias_info) ? "(+1)" : "");
     }
     if (mask & ENCODE_LITERAL) {
       strcat(buf, "lit ");
diff --git a/compiler/dex/quick/arm/utility_arm.cc b/compiler/dex/quick/arm/utility_arm.cc
index c63de69..a7b8dfe 100644
--- a/compiler/dex/quick/arm/utility_arm.cc
+++ b/compiler/dex/quick/arm/utility_arm.cc
@@ -90,7 +90,6 @@
   LIR* load_pc_rel = RawLIR(current_dalvik_offset_, kThumb2Vldrs,
                           r_dest, r15pc, 0, 0, 0, data_target);
   SetMemRefType(load_pc_rel, true, kLiteral);
-  load_pc_rel->alias_info = reinterpret_cast<uintptr_t>(data_target);
   AppendLIR(load_pc_rel);
   return load_pc_rel;
 }
@@ -626,7 +625,6 @@
                    r_dest_lo, r_dest_hi, r15pc, 0, 0, data_target);
     }
     SetMemRefType(res, true, kLiteral);
-    res->alias_info = reinterpret_cast<uintptr_t>(data_target);
     AppendLIR(res);
   }
   return res;