Merge "Template BitTable based on the accessors."
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 58a35dd..fb5d933 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -56,14 +56,14 @@
     DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
   }
 
-  current_stack_map_ = BitTableBuilder<StackMap::kCount>::Entry();
+  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
   current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
   current_stack_map_[StackMap::kPackedNativePc] =
       StackMap::PackNativePc(native_pc_offset, instruction_set_);
   current_stack_map_[StackMap::kDexPc] = dex_pc;
   if (register_mask != 0) {
     uint32_t shift = LeastSignificantBit(register_mask);
-    BitTableBuilder<RegisterMask::kCount>::Entry entry;
+    BitTableBuilder<RegisterMask>::Entry entry;
     entry[RegisterMask::kValue] = register_mask >> shift;
     entry[RegisterMask::kShift] = shift;
     current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
@@ -126,7 +126,7 @@
 void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
   uint32_t packed_native_pc = current_stack_map_[StackMap::kPackedNativePc];
   size_t invoke_info_index = invoke_infos_.size();
-  BitTableBuilder<InvokeInfo::kCount>::Entry entry;
+  BitTableBuilder<InvokeInfo>::Entry entry;
   entry[InvokeInfo::kPackedNativePc] = packed_native_pc;
   entry[InvokeInfo::kInvokeType] = invoke_type;
   entry[InvokeInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
@@ -153,7 +153,7 @@
 
   expected_num_dex_registers_ += num_dex_registers;
 
-  BitTableBuilder<InlineInfo::kCount>::Entry entry;
+  BitTableBuilder<InlineInfo>::Entry entry;
   entry[InlineInfo::kIsLast] = InlineInfo::kMore;
   entry[InlineInfo::kDexPc] = dex_pc;
   entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
@@ -215,7 +215,7 @@
    // Distance is the difference between this index and the index of the last modification.
     uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
     if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
-      BitTableBuilder<DexRegisterInfo::kCount>::Entry entry;
+      BitTableBuilder<DexRegisterInfo>::Entry entry;
       entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
       entry[DexRegisterInfo::kPackedValue] =
           DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
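For the register-mask path in BeginStackMapEntry above, the numbers work out
as follows; this diff does not show GetMask(), so reading the reconstruction
as value << shift is an assumption:

  // register_mask = 0x000000C0:
  //   shift = LeastSignificantBit(0xC0) = 6
  //   value = 0xC0 >> 6 = 0x3
  // The stored row {value=3, shift=6} needs 2 + 3 bits instead of a full
  // 8+ bit mask; GetMask() presumably restores it as value << shift.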
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 6842d9f..7d1820d 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -103,26 +103,26 @@
   void CreateDexRegisterMap();
 
   const InstructionSet instruction_set_;
-  BitTableBuilder<StackMap::kCount> stack_maps_;
-  BitTableBuilder<RegisterMask::kCount> register_masks_;
+  BitTableBuilder<StackMap> stack_maps_;
+  BitTableBuilder<RegisterMask> register_masks_;
   BitmapTableBuilder stack_masks_;
-  BitTableBuilder<InvokeInfo::kCount> invoke_infos_;
-  BitTableBuilder<InlineInfo::kCount> inline_infos_;
+  BitTableBuilder<InvokeInfo> invoke_infos_;
+  BitTableBuilder<InlineInfo> inline_infos_;
   BitmapTableBuilder dex_register_masks_;
-  BitTableBuilder<MaskInfo::kCount> dex_register_maps_;
-  BitTableBuilder<DexRegisterInfo::kCount> dex_register_catalog_;
+  BitTableBuilder<MaskInfo> dex_register_maps_;
+  BitTableBuilder<DexRegisterInfo> dex_register_catalog_;
   uint32_t num_dex_registers_ = 0;  // TODO: Make this const and get the value in constructor.
   ScopedArenaVector<uint8_t> out_;
 
-  BitTableBuilder<1> method_infos_;
+  BitTableBuilderBase<1> method_infos_;
 
   ScopedArenaVector<BitVector*> lazy_stack_masks_;
 
   // Variables which track the current state between Begin/End calls.
   bool in_stack_map_;
   bool in_inline_info_;
-  BitTableBuilder<StackMap::kCount>::Entry current_stack_map_;
-  ScopedArenaVector<BitTableBuilder<InlineInfo::kCount>::Entry> current_inline_infos_;
+  BitTableBuilder<StackMap>::Entry current_stack_map_;
+  ScopedArenaVector<BitTableBuilder<InlineInfo>::Entry> current_inline_infos_;
   ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
   ScopedArenaVector<DexRegisterLocation> previous_dex_registers_;
   ScopedArenaVector<uint32_t> dex_register_timestamp_;  // Stack map index of last change.
@@ -131,7 +131,7 @@
   // Temporary variables used in CreateDexRegisterMap.
   // They are here so that we can reuse the reserved memory.
   ArenaBitVector temp_dex_register_mask_;
-  ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo::kCount>::Entry> temp_dex_register_map_;
+  ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo>::Entry> temp_dex_register_map_;
 
   // A set of lambda functions to be executed at the end to verify
   // the encoded data. It is generally only used in debug builds.
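Note that method_infos_ deliberately stays on the count-based base class: it
has no accessor type, so BitTableBuilderBase<1> is used directly. The typed
form is only a thin layer over the base, as a quick sanity check illustrates
(a sketch assuming <type_traits> and the stack_map.h accessors are visible;
MaskInfo is a one-column accessor):

  static_assert(std::is_base_of<BitTableBuilderBase<1>,
                                BitTableBuilder<MaskInfo>>::value,
                "BitTableBuilder<A> merely pins the width to A::kNumColumns");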
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 2cc1a31..053bf1f 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -64,58 +64,17 @@
   }
 }
 
+// General-purpose table of uint32_t values, which are tightly packed at the bit level.
+// It has its own header with the number of rows and the bit-widths of all columns.
+// The values are accessible by (row, column).  The value -1 is stored efficiently.
 template<uint32_t kNumColumns>
-class BitTable {
+class BitTableBase {
  public:
-  class Accessor {
-   public:
-    static constexpr uint32_t kCount = kNumColumns;
-    static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max();
+  static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max();  // == -1.
+  static constexpr uint32_t kValueBias = kNoValue;  // Bias so that -1 is encoded as 0.
 
-    Accessor() {}
-    Accessor(const BitTable* table, uint32_t row) : table_(table), row_(row) {}
-
-    ALWAYS_INLINE uint32_t Row() const { return row_; }
-
-    ALWAYS_INLINE bool IsValid() const { return table_ != nullptr && row_ < table_->NumRows(); }
-
-    template<uint32_t Column>
-    ALWAYS_INLINE uint32_t Get() const {
-      static_assert(Column < kNumColumns, "Column out of bounds");
-      return table_->Get(row_, Column);
-    }
-
-    ALWAYS_INLINE bool Equals(const Accessor& other) {
-      return this->table_ == other.table_ && this->row_ == other.row_;
-    }
-
-// Helper macro to create constructors and per-table utilities in derived class.
-#define BIT_TABLE_HEADER()                                                     \
-    using BitTable<kCount>::Accessor::Accessor; /* inherit the constructors */ \
-    template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName;  \
-
-// Helper macro to create named column accessors in derived class.
-#define BIT_TABLE_COLUMN(COLUMN, NAME)                                         \
-    static constexpr uint32_t k##NAME = COLUMN;                                \
-    ALWAYS_INLINE uint32_t Get##NAME() const {                                 \
-      return table_->Get(row_, COLUMN);                                        \
-    }                                                                          \
-    ALWAYS_INLINE bool Has##NAME() const {                                     \
-      return table_->Get(row_, COLUMN) != kNoValue;                            \
-    }                                                                          \
-    template<int UNUSED> struct ColumnName<COLUMN, UNUSED> {                   \
-      static constexpr const char* Value = #NAME;                              \
-    };                                                                         \
-
-   protected:
-    const BitTable* table_ = nullptr;
-    uint32_t row_ = -1;
-  };
-
-  static constexpr uint32_t kValueBias = -1;
-
-  BitTable() {}
-  BitTable(void* data, size_t size, size_t* bit_offset = 0) {
+  BitTableBase() {}
+  BitTableBase(void* data, size_t size, size_t* bit_offset) {
     Decode(BitMemoryRegion(MemoryRegion(data, size)), bit_offset);
   }
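With the accessor hoisted out, BitTableBase keeps only the raw storage
interface; both read forms exercised by the tests remain available:

  // table.Get(row)          -- single-column convenience overload
  // table.Get(row, column)  -- general form; the named getters wrap this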
 
@@ -162,6 +121,7 @@
   }
 
   size_t HeaderBitSize() const { return header_bit_size_; }
+
   size_t BitSize() const { return header_bit_size_ + table_data_.size_in_bits(); }
 
  protected:
@@ -172,6 +132,45 @@
   uint16_t header_bit_size_ = 0;
 };
 
+// Helper class which can be used to create BitTable accessors with named getters.
+template<uint32_t NumColumns>
+class BitTableAccessor {
+ public:
+  static constexpr uint32_t kNumColumns = NumColumns;
+  static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
+
+  BitTableAccessor() {}
+  BitTableAccessor(const BitTableBase<kNumColumns>* table, uint32_t row)
+      : table_(table), row_(row) {
+  }
+
+  ALWAYS_INLINE uint32_t Row() const { return row_; }
+
+  ALWAYS_INLINE bool IsValid() const { return table_ != nullptr && row_ < table_->NumRows(); }
+
+  ALWAYS_INLINE bool Equals(const BitTableAccessor& other) {
+    return this->table_ == other.table_ && this->row_ == other.row_;
+  }
+
+// Helper macro to create constructors and per-table utilities in the derived class.
+#define BIT_TABLE_HEADER()                                                           \
+  using BitTableAccessor<kNumColumns>::BitTableAccessor; /* inherit constructors */  \
+  template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName;          \
+
+// Helper macro to create named column accessors in the derived class.
+#define BIT_TABLE_COLUMN(COLUMN, NAME)                                               \
+  static constexpr uint32_t k##NAME = COLUMN;                                        \
+  ALWAYS_INLINE uint32_t Get##NAME() const { return table_->Get(row_, COLUMN); }     \
+  ALWAYS_INLINE bool Has##NAME() const { return Get##NAME() != kNoValue; }           \
+  template<int UNUSED> struct ColumnName<COLUMN, UNUSED> {                           \
+    static constexpr const char* Value = #NAME;                                      \
+  };                                                                                 \
+
+ protected:
+  const BitTableBase<kNumColumns>* table_ = nullptr;
+  uint32_t row_ = -1;
+};
+
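What the two macros generate, shown on a hypothetical two-column accessor
(PointInfo and its columns are invented for illustration; the real accessors
live in runtime/stack_map.h):

  class PointInfo : public BitTableAccessor<2> {
   public:
    BIT_TABLE_HEADER()      // inherits constructors, declares ColumnName<>
    BIT_TABLE_COLUMN(0, X)  // -> kX, GetX(), HasX(), ColumnName<0>::Value == "X"
    BIT_TABLE_COLUMN(1, Y)  // -> kY, GetY(), HasY(), ColumnName<1>::Value == "Y"
  };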
 // Template meta-programming helper.
 template<typename Accessor, size_t... Columns>
 static const char* const* GetBitTableColumnNamesImpl(std::index_sequence<Columns...>) {
@@ -179,19 +178,34 @@
   return names;
 }
 
+// Returns the names of all columns in the given accessor.
 template<typename Accessor>
 static const char* const* GetBitTableColumnNames() {
-  return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kCount>());
+  return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kNumColumns>());
 }
 
+// Wrapper which makes it easier to use named accessors for the individual rows.
+template<typename Accessor>
+class BitTable : public BitTableBase<Accessor::kNumColumns> {
+ public:
+  using BitTableBase<Accessor::kNumColumns>::BitTableBase;  // Constructors.
+
+  ALWAYS_INLINE Accessor GetRow(uint32_t row) const {
+    return Accessor(this, row);
+  }
+};
+
 // Helper class for encoding BitTable. It can optionally de-duplicate the inputs.
 template<uint32_t kNumColumns>
-class BitTableBuilder {
+class BitTableBuilderBase {
  public:
+  static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
+  static constexpr uint32_t kValueBias = BitTableBase<kNumColumns>::kValueBias;
+
   class Entry {
    public:
     Entry() {
-      std::fill_n(data_, kNumColumns, BitTable<kNumColumns>::Accessor::kNoValue);
+      std::fill_n(data_, kNumColumns, kNoValue);
     }
 
     Entry(std::initializer_list<uint32_t> values) {
@@ -213,7 +227,7 @@
     uint32_t data_[kNumColumns];
   };
 
-  explicit BitTableBuilder(ScopedArenaAllocator* allocator)
+  explicit BitTableBuilderBase(ScopedArenaAllocator* allocator)
       : rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
         dedup_(8, allocator->Adapter(kArenaAllocBitTableBuilder)) {
   }
@@ -266,7 +280,7 @@
     std::fill_n(max_column_value, kNumColumns, 0);
     for (uint32_t r = 0; r < size(); r++) {
       for (uint32_t c = 0; c < kNumColumns; c++) {
-        max_column_value[c] |= rows_[r][c] - BitTable<kNumColumns>::kValueBias;
+        max_column_value[c] |= rows_[r][c] - kValueBias;
       }
     }
     for (uint32_t c = 0; c < kNumColumns; c++) {
@@ -277,7 +291,6 @@
   // Encode the stored data into a BitTable.
   template<typename Vector>
   void Encode(Vector* out, size_t* bit_offset) const {
-    constexpr uint32_t bias = BitTable<kNumColumns>::kValueBias;
     size_t initial_bit_offset = *bit_offset;
 
     std::array<uint32_t, kNumColumns> column_bits;
@@ -295,14 +308,14 @@
       BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
       for (uint32_t r = 0; r < size(); r++) {
         for (uint32_t c = 0; c < kNumColumns; c++) {
-          region.StoreBitsAndAdvance(bit_offset, rows_[r][c] - bias, column_bits[c]);
+          region.StoreBitsAndAdvance(bit_offset, rows_[r][c] - kValueBias, column_bits[c]);
         }
       }
     }
 
     // Verify the written data.
     if (kIsDebugBuild) {
-      BitTable<kNumColumns> table;
+      BitTableBase<kNumColumns> table;
       BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
       table.Decode(region, &initial_bit_offset);
       DCHECK_EQ(size(), table.NumRows());
@@ -322,6 +335,12 @@
   ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
 };
 
+template<typename Accessor>
+class BitTableBuilder : public BitTableBuilderBase<Accessor::kNumColumns> {
+ public:
+  using BitTableBuilderBase<Accessor::kNumColumns>::BitTableBuilderBase;  // Constructors.
+};
+
 // Helper class for encoding single-column BitTable of bitmaps (allows more than 32 bits).
 class BitmapTableBuilder {
  public:
@@ -384,7 +403,7 @@
 
     // Verify the written data.
     if (kIsDebugBuild) {
-      BitTable<1> table;
+      BitTableBase<1> table;
       BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
       table.Decode(region, &initial_bit_offset);
       DCHECK_EQ(size(), table.NumRows());
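A round trip through the new names, reusing the hypothetical PointInfo
accessor from above; the allocator setup follows the pattern of the tests
below:

  BitTableBuilder<PointInfo> builder(&allocator);
  builder.Add({10u, 20u});

  std::vector<uint8_t> buffer;
  size_t encode_bit_offset = 0;
  builder.Encode(&buffer, &encode_bit_offset);

  size_t decode_bit_offset = 0;
  BitTable<PointInfo> table(buffer.data(), buffer.size(), &decode_bit_offset);
  PointInfo point = table.GetRow(0);  // typed row instead of raw Get(0, column)
  uint32_t x = point.GetX();          // 10u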
diff --git a/libartbase/base/bit_table_test.cc b/libartbase/base/bit_table_test.cc
index 969940f..ee7cb3a 100644
--- a/libartbase/base/bit_table_test.cc
+++ b/libartbase/base/bit_table_test.cc
@@ -50,11 +50,11 @@
 
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<1> builder(&allocator);
+  BitTableBuilderBase<1> builder(&allocator);
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
-  BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
+  BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
   EXPECT_EQ(encode_bit_offset, decode_bit_offset);
   EXPECT_EQ(0u, table.NumRows());
 }
@@ -67,7 +67,7 @@
   constexpr uint32_t kNoValue = -1;
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<1> builder(&allocator);
+  BitTableBuilderBase<1> builder(&allocator);
   builder.Add({42u});
   builder.Add({kNoValue});
   builder.Add({1000u});
@@ -75,7 +75,7 @@
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
-  BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
+  BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
   EXPECT_EQ(encode_bit_offset, decode_bit_offset);
   EXPECT_EQ(4u, table.NumRows());
   EXPECT_EQ(42u, table.Get(0));
@@ -93,12 +93,12 @@
   for (size_t start_bit_offset = 0; start_bit_offset <= 32; start_bit_offset++) {
     std::vector<uint8_t> buffer;
     size_t encode_bit_offset = start_bit_offset;
-    BitTableBuilder<1> builder(&allocator);
+    BitTableBuilderBase<1> builder(&allocator);
     builder.Add({42u});
     builder.Encode(&buffer, &encode_bit_offset);
 
     size_t decode_bit_offset = start_bit_offset;
-    BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
+    BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
     EXPECT_EQ(encode_bit_offset, decode_bit_offset) << " start_bit_offset=" << start_bit_offset;
     EXPECT_EQ(1u, table.NumRows());
     EXPECT_EQ(42u, table.Get(0));
@@ -113,13 +113,13 @@
   constexpr uint32_t kNoValue = -1;
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<4> builder(&allocator);
+  BitTableBuilderBase<4> builder(&allocator);
   builder.Add({42u, kNoValue, 0u, static_cast<uint32_t>(-2)});
   builder.Add({62u, kNoValue, 63u, static_cast<uint32_t>(-3)});
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
-  BitTable<4> table(buffer.data(), buffer.size(), &decode_bit_offset);
+  BitTableBase<4> table(buffer.data(), buffer.size(), &decode_bit_offset);
   EXPECT_EQ(encode_bit_offset, decode_bit_offset);
   EXPECT_EQ(2u, table.NumRows());
   EXPECT_EQ(42u, table.Get(0, 0));
@@ -141,9 +141,9 @@
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
 
-  BitTableBuilder<2> builder(&allocator);
-  BitTableBuilder<2>::Entry value0{1, 2};
-  BitTableBuilder<2>::Entry value1{3, 4};
+  BitTableBuilderBase<2> builder(&allocator);
+  BitTableBuilderBase<2>::Entry value0{1, 2};
+  BitTableBuilderBase<2>::Entry value1{3, 4};
   EXPECT_EQ(0u, builder.Dedup(&value0));
   EXPECT_EQ(1u, builder.Dedup(&value1));
   EXPECT_EQ(0u, builder.Dedup(&value0));
@@ -169,7 +169,7 @@
   EXPECT_EQ(1 + static_cast<uint32_t>(POPCOUNT(value)), builder.size());
 
   size_t decode_bit_offset = 0;
-  BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
+  BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset);
   EXPECT_EQ(encode_bit_offset, decode_bit_offset);
   for (auto it : indicies) {
     uint64_t expected = it.first;
@@ -187,10 +187,10 @@
   ScopedArenaAllocator allocator(&arena_stack);
   FNVHash<MemoryRegion> hasher;
 
-  BitTableBuilder<2>::Entry value0{56948505, 0};
-  BitTableBuilder<2>::Entry value1{67108869, 0};
+  BitTableBuilderBase<2>::Entry value0{56948505, 0};
+  BitTableBuilderBase<2>::Entry value1{67108869, 0};
 
-  BitTableBuilder<2> builder(&allocator);
+  BitTableBuilderBase<2> builder(&allocator);
   EXPECT_EQ(hasher(MemoryRegion(&value0, sizeof(value0))),
             hasher(MemoryRegion(&value1, sizeof(value1))));
   EXPECT_EQ(0u, builder.Dedup(&value0));
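Two properties these tests pin down are worth spelling out. First, Dedup()
returns a stable row index for identical entries, so repeated rows cost
nothing extra. Second, values are stored biased: stored = value - kValueBias,
which in uint32_t arithmetic is value + 1 modulo 2^32, so kNoValue
(0xFFFFFFFF) encodes as 0 and an all-kNoValue column occupies zero bits. For
the four-column row {42u, kNoValue, 0u, -2} in the test above:

  //   42          -> 43          (42 + 1)
  //   0xFFFFFFFF  -> 0           (kNoValue, the cheapest encoding)
  //   0           -> 1
  //   0xFFFFFFFE  -> 0xFFFFFFFF  (-2 is the worst case: a full 32-bit column)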
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 43609e8..f2418d0 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -91,7 +91,7 @@
 
 template<typename Accessor>
 static void AddTableSizeStats(const char* table_name,
-                              const BitTable<Accessor::kCount>& table,
+                              const BitTable<Accessor>& table,
                               /*out*/ Stats* parent) {
   Stats* table_stats = parent->Child(table_name);
   table_stats->AddBits(table.BitSize());
@@ -135,7 +135,7 @@
 template<typename Accessor>
 static void DumpTable(VariableIndentationOutputStream* vios,
                       const char* table_name,
-                      const BitTable<Accessor::kCount>& table,
+                      const BitTable<Accessor>& table,
                       bool verbose,
                       bool is_mask = false) {
   if (table.NumRows() != 0) {
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 8af73e9..64a084f 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -119,7 +119,7 @@
  * - Knowing the inlining information,
  * - Knowing the values of dex registers.
  */
-class StackMap : public BitTable<8>::Accessor {
+class StackMap : public BitTableAccessor<8> {
  public:
   enum Kind {
     Default = -1,
@@ -138,7 +138,7 @@
   BIT_TABLE_COLUMN(7, DexRegisterMapIndex)
 
   ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
-    return UnpackNativePc(Get<kPackedNativePc>(), instruction_set);
+    return UnpackNativePc(GetPackedNativePc(), instruction_set);
   }
 
   ALWAYS_INLINE bool HasInlineInfo() const {
@@ -172,7 +172,7 @@
  * The row referenced from the StackMap holds information at depth 0.
  * Following rows hold information for further depths.
  */
-class InlineInfo : public BitTable<6>::Accessor {
+class InlineInfo : public BitTableAccessor<6> {
  public:
   BIT_TABLE_HEADER()
   BIT_TABLE_COLUMN(0, IsLast)  // Determines if there are further rows for further depths.
@@ -206,7 +206,7 @@
             const MethodInfo& method_info) const;
 };
 
-class InvokeInfo : public BitTable<3>::Accessor {
+class InvokeInfo : public BitTableAccessor<3> {
  public:
   BIT_TABLE_HEADER()
   BIT_TABLE_COLUMN(0, PackedNativePc)
@@ -214,7 +214,7 @@
   BIT_TABLE_COLUMN(2, MethodInfoIndex)
 
   ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
-    return StackMap::UnpackNativePc(Get<kPackedNativePc>(), instruction_set);
+    return StackMap::UnpackNativePc(GetPackedNativePc(), instruction_set);
   }
 
   uint32_t GetMethodIndex(MethodInfo method_info) const {
@@ -222,19 +222,19 @@
   }
 };
 
-class MaskInfo : public BitTable<1>::Accessor {
+class MaskInfo : public BitTableAccessor<1> {
  public:
   BIT_TABLE_HEADER()
   BIT_TABLE_COLUMN(0, Mask)
 };
 
-class DexRegisterMapInfo : public BitTable<1>::Accessor {
+class DexRegisterMapInfo : public BitTableAccessor<1> {
  public:
   BIT_TABLE_HEADER()
   BIT_TABLE_COLUMN(0, CatalogueIndex)
 };
 
-class DexRegisterInfo : public BitTable<2>::Accessor {
+class DexRegisterInfo : public BitTableAccessor<2> {
  public:
   BIT_TABLE_HEADER()
   BIT_TABLE_COLUMN(0, Kind)
@@ -265,7 +265,7 @@
 
 // Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
 // therefore it is worth encoding the mask as value+shift.
-class RegisterMask : public BitTable<2>::Accessor {
+class RegisterMask : public BitTableAccessor<2> {
  public:
   BIT_TABLE_HEADER()
   BIT_TABLE_COLUMN(0, Value)
@@ -303,7 +303,7 @@
   }
 
   ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
-    return StackMap(&stack_maps_, index);
+    return stack_maps_.GetRow(index);
   }
 
   BitMemoryRegion GetStackMask(size_t index) const {
@@ -317,7 +317,7 @@
 
   uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
     uint32_t index = stack_map.GetRegisterMaskIndex();
-    return (index == StackMap::kNoValue) ? 0 : RegisterMask(&register_masks_, index).GetMask();
+    return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
   }
 
   uint32_t GetNumberOfLocationCatalogEntries() const {
@@ -327,7 +327,7 @@
   ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
     return (index == StackMap::kNoValue)
       ? DexRegisterLocation::None()
-      : DexRegisterInfo(&dex_register_catalog_, index).GetLocation();
+      : dex_register_catalog_.GetRow(index).GetLocation();
   }
 
   uint32_t GetNumberOfStackMaps() const {
@@ -335,7 +335,7 @@
   }
 
   InvokeInfo GetInvokeInfo(size_t index) const {
-    return InvokeInfo(&invoke_infos_, index);
+    return invoke_infos_.GetRow(index);
   }
 
   ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
@@ -363,7 +363,7 @@
   }
 
   InlineInfo GetInlineInfo(size_t index) const {
-    return InlineInfo(&inline_infos_, index);
+    return inline_infos_.GetRow(index);
   }
 
   uint32_t GetInlineDepthOf(StackMap stack_map) const {
@@ -473,14 +473,14 @@
   }
 
   size_t size_;
-  BitTable<StackMap::kCount> stack_maps_;
-  BitTable<RegisterMask::kCount> register_masks_;
-  BitTable<MaskInfo::kCount> stack_masks_;
-  BitTable<InvokeInfo::kCount> invoke_infos_;
-  BitTable<InlineInfo::kCount> inline_infos_;
-  BitTable<MaskInfo::kCount> dex_register_masks_;
-  BitTable<DexRegisterMapInfo::kCount> dex_register_maps_;
-  BitTable<DexRegisterInfo::kCount> dex_register_catalog_;
+  BitTable<StackMap> stack_maps_;
+  BitTable<RegisterMask> register_masks_;
+  BitTable<MaskInfo> stack_masks_;
+  BitTable<InvokeInfo> invoke_infos_;
+  BitTable<InlineInfo> inline_infos_;
+  BitTable<MaskInfo> dex_register_masks_;
+  BitTable<DexRegisterMapInfo> dex_register_maps_;
+  BitTable<DexRegisterInfo> dex_register_catalog_;
   uint32_t number_of_dex_registers_;  // Excludes any inlined methods.
 };
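On the runtime side, the net effect is that CodeInfo reads stay one-liners
over typed rows; a minimal sketch, assuming a decoded CodeInfo named
code_info and an arm64 target:

  StackMap map = code_info.GetStackMapAt(0);  // stack_maps_.GetRow(0) underneath
  if (map.IsValid()) {
    uint32_t dex_pc = map.GetDexPc();         // named getter from BIT_TABLE_COLUMN
    uint32_t native_pc = map.GetNativePcOffset(InstructionSet::kArm64);
    uint32_t register_mask = code_info.GetRegisterMaskOf(map);
  }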