Revert^2 "Optimize register mask and stack mask in stack maps."

This reverts commit 8b20b5c1f5b454b2f8b8bff492c88724b5002600.

Reason for revert: Retry submit unmodified after fixing the test.

Use BitTable to store the masks as well and move the
deduplication responsibility to the BitTable builders.

Don't generate entries for masks which are all zeros.
This saves 0.2% of .oat file size on both Arm64 and Arm.

Encode registers as (value+shift) due to trailing zeros.
This saves 1.0% of .oat file size on Arm64 and 0.2% on Arm.

Test: test-art-target-gtest-exception_test
Test: test-art-host-gtest-bit_table_test
Test: test-art-host-gtest-stack_map_test
Change-Id: Ib643776dbec3f051cc29cd13ff39e453fab5fae9
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 91cecf0..1cb9a39 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -799,6 +799,24 @@
   }
 };
 
+// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
+// therefore it is worth encoding the mask as value+shift.
+class RegisterMask : public BitTable<2>::Accessor {
+ public:
+  enum Field {
+    kValue,
+    kShift,
+    kCount,
+  };
+
+  RegisterMask(const BitTable<kCount>* table, uint32_t row)
+    : BitTable<kCount>::Accessor(table, row) {}
+
+  ALWAYS_INLINE uint32_t GetMask() const {
+    return Get<kValue>() << Get<kShift>();
+  }
+};
+
 /**
  * Wrapper around all compiler information collected for a method.
  * The information is of the form:
@@ -833,24 +851,22 @@
     return DexRegisterLocationCatalog(location_catalog_);
   }
 
-  ALWAYS_INLINE size_t GetNumberOfStackMaskBits() const {
-    return stack_mask_bits_;
-  }
-
   ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
     return StackMap(&stack_maps_, index);
   }
 
   BitMemoryRegion GetStackMask(size_t index) const {
-    return stack_masks_.Subregion(index * stack_mask_bits_, stack_mask_bits_);
+    return stack_masks_.GetBitMemoryRegion(index);
   }
 
   BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
-    return GetStackMask(stack_map.GetStackMaskIndex());
+    uint32_t index = stack_map.GetStackMaskIndex();
+    return (index == StackMap::kNoValue) ? BitMemoryRegion() : GetStackMask(index);
   }
 
   uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
-    return register_masks_.Get(stack_map.GetRegisterMaskIndex());
+    uint32_t index = stack_map.GetRegisterMaskIndex();
+    return (index == StackMap::kNoValue) ? 0 : RegisterMask(&register_masks_, index).GetMask();
   }
 
   uint32_t GetNumberOfLocationCatalogEntries() const {
@@ -1045,8 +1061,8 @@
     invoke_infos_.Decode(bit_region, &bit_offset);
     inline_infos_.Decode(bit_region, &bit_offset);
     register_masks_.Decode(bit_region, &bit_offset);
-    stack_mask_bits_ = DecodeVarintBits(bit_region, &bit_offset);
-    stack_masks_ = bit_region.Subregion(bit_offset, non_header_size * kBitsPerByte - bit_offset);
+    stack_masks_.Decode(bit_region, &bit_offset);
+    CHECK_EQ(BitsToBytesRoundUp(bit_offset), non_header_size);
   }
 
   size_t size_;
@@ -1056,9 +1072,8 @@
   BitTable<StackMap::Field::kCount> stack_maps_;
   BitTable<InvokeInfo::Field::kCount> invoke_infos_;
   BitTable<InlineInfo::Field::kCount> inline_infos_;
-  BitTable<1> register_masks_;
-  uint32_t stack_mask_bits_ = 0;
-  BitMemoryRegion stack_masks_;
+  BitTable<RegisterMask::Field::kCount> register_masks_;
+  BitTable<1> stack_masks_;
 
   friend class OatDumper;
 };