author     2018-06-13 15:02:32 +0100
committer  2018-06-14 12:41:55 +0100
commit     f325e28656b51167f29496e87926b805240052c4
tree       b37117db0fa312c567fddb8d6a06df6dd0b9c052
parent     50fac06c51864f293c61ff9d0983b82698cf6dac
Remove POD helper classes for BitTableBuilder.
Instead of declaring the helper classes explicitly and then casting,
create a generic BitTableBuilder::Entry class for that purpose.
This removes the need to keep the POD helper classes in sync with the
accessors in stack_map.h.
Test: test-art-host-gtest-stack_map_test
Test: test-art-host-gtest-bit_table_test
Change-Id: I4c632313bafd3a4bc823648436a5310b6f2a1d13
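
To illustrate the change, here is a minimal before/after sketch of the builder
usage (identifiers are taken from the diff below; this is a simplified
composite, not a verbatim excerpt):

    // Before: a hand-written POD struct per table, which had to mirror the
    // accessor's column layout exactly (enforced only by static_asserts).
    struct RegisterMaskEntry {
      uint32_t value;
      uint32_t shift;
    };
    RegisterMaskEntry entry = { register_mask >> shift, shift };
    current_stack_map_.register_mask_index = register_masks_.Dedup(&entry);

    // After: the builder is parameterized by column count and provides a
    // generic Entry indexed by the accessor's column enum, so there is no
    // separate struct to keep in sync.
    BitTableBuilder<RegisterMask::kCount>::Entry entry;  // columns default to kNoValue
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);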
 compiler/optimizing/stack_map_stream.cc | 88
 compiler/optimizing/stack_map_stream.h  | 62
 libartbase/base/bit_table.h             | 59
 libartbase/base/bit_table_test.cc       | 64
 4 files changed, 114 insertions(+), 159 deletions(-)
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index cd115499a6..ca585147d1 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -31,11 +31,12 @@ namespace art {
 constexpr static bool kVerifyStackMaps = kIsDebugBuild;
 
 uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
-  return StackMap::UnpackNativePc(stack_maps_[i].packed_native_pc, instruction_set_);
+  return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
 }
 
 void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
-  stack_maps_[i].packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_);
+  stack_maps_[i][StackMap::kPackedNativePc] =
+      StackMap::PackNativePc(native_pc_offset, instruction_set_);
 }
 
 void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
@@ -55,20 +56,17 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
     DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
   }
 
-  current_stack_map_ = StackMapEntry {
-    .kind = static_cast<uint32_t>(kind),
-    .packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_),
-    .dex_pc = dex_pc,
-    .register_mask_index = kNoValue,
-    .stack_mask_index = kNoValue,
-    .inline_info_index = kNoValue,
-    .dex_register_mask_index = kNoValue,
-    .dex_register_map_index = kNoValue,
-  };
+  current_stack_map_ = BitTableBuilder<StackMap::kCount>::Entry();
+  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
+  current_stack_map_[StackMap::kPackedNativePc] =
+      StackMap::PackNativePc(native_pc_offset, instruction_set_);
+  current_stack_map_[StackMap::kDexPc] = dex_pc;
   if (register_mask != 0) {
     uint32_t shift = LeastSignificantBit(register_mask);
-    RegisterMaskEntry entry = { register_mask >> shift, shift };
-    current_stack_map_.register_mask_index = register_masks_.Dedup(&entry);
+    BitTableBuilder<RegisterMask::kCount>::Entry entry;
+    entry[RegisterMask::kValue] = register_mask >> shift;
+    entry[RegisterMask::kShift] = shift;
+    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
   }
   // The compiler assumes the bit vector will be read during PrepareForFillIn(),
   // and it might modify the data before that. Therefore, just store the pointer.
@@ -114,8 +112,8 @@ void StackMapStream::EndStackMapEntry() {
   // Generate index into the InlineInfo table.
   if (!current_inline_infos_.empty()) {
-    current_inline_infos_.back().is_last = InlineInfo::kLast;
-    current_stack_map_.inline_info_index =
+    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
+    current_stack_map_[StackMap::kInlineInfoIndex] =
         inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
   }
 
@@ -130,13 +128,13 @@ void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t
 }
 
 void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
-  uint32_t packed_native_pc = current_stack_map_.packed_native_pc;
+  uint32_t packed_native_pc = current_stack_map_[StackMap::kPackedNativePc];
   size_t invoke_info_index = invoke_infos_.size();
-  invoke_infos_.Add(InvokeInfoEntry {
-    .packed_native_pc = packed_native_pc,
-    .invoke_type = invoke_type,
-    .method_info_index = method_infos_.Dedup(&dex_method_index),
-  });
+  BitTableBuilder<InvokeInfo::kCount>::Entry entry;
+  entry[InvokeInfo::kPackedNativePc] = packed_native_pc;
+  entry[InvokeInfo::kInvokeType] = invoke_type;
+  entry[InvokeInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
+  invoke_infos_.Add(entry);
 
   if (kVerifyStackMaps) {
     dchecks_.emplace_back([=](const CodeInfo& code_info) {
@@ -144,7 +142,7 @@ void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index
       CHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
               StackMap::UnpackNativePc(packed_native_pc, instruction_set_));
       CHECK_EQ(invoke_info.GetInvokeType(), invoke_type);
-      CHECK_EQ(method_infos_[invoke_info.GetMethodInfoIndex()], dex_method_index);
+      CHECK_EQ(method_infos_[invoke_info.GetMethodInfoIndex()][0], dex_method_index);
     });
   }
 }
@@ -159,24 +157,20 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
   expected_num_dex_registers_ += num_dex_registers;
 
-  InlineInfoEntry entry = {
-    .is_last = InlineInfo::kMore,
-    .dex_pc = dex_pc,
-    .method_info_index = kNoValue,
-    .art_method_hi = kNoValue,
-    .art_method_lo = kNoValue,
-    .num_dex_registers = static_cast<uint32_t>(expected_num_dex_registers_),
-  };
+  BitTableBuilder<InlineInfo::kCount>::Entry entry;
+  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
+  entry[InlineInfo::kDexPc] = dex_pc;
+  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
   if (EncodeArtMethodInInlineInfo(method)) {
-    entry.art_method_hi = High32Bits(reinterpret_cast<uintptr_t>(method));
-    entry.art_method_lo = Low32Bits(reinterpret_cast<uintptr_t>(method));
+    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
+    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
   } else {
     if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
       ScopedObjectAccess soa(Thread::Current());
       DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
     }
     uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
-    entry.method_info_index = method_infos_.Dedup(&dex_method_index);
+    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
   }
   current_inline_infos_.push_back(entry);
 
@@ -192,7 +186,7 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
       if (encode_art_method) {
         CHECK_EQ(inline_info.GetArtMethod(), method);
       } else {
-        CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()],
+        CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()][0],
                  method->GetDexMethodIndexUnchecked());
       }
     });
@@ -225,13 +219,13 @@ void StackMapStream::CreateDexRegisterMap() {
       // Distance is difference between this index and the index of last modification.
       uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
       if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
-        DexRegisterEntry entry = DexRegisterEntry{
-          .kind = static_cast<uint32_t>(reg.GetKind()),
-          .packed_value = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()),
-        };
+        BitTableBuilder<DexRegisterInfo::kCount>::Entry entry;
+        entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
+        entry[DexRegisterInfo::kPackedValue] =
+            DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
         uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
         temp_dex_register_mask_.SetBit(i);
-        temp_dex_register_map_.push_back(index);
+        temp_dex_register_map_.push_back({index});
         previous_dex_registers_[i] = reg;
         dex_register_timestamp_[i] = stack_maps_.size();
       }
@@ -239,12 +233,12 @@ void StackMapStream::CreateDexRegisterMap() {
 
   // Set the mask and map for the current StackMap (which includes inlined registers).
   if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
-    current_stack_map_.dex_register_mask_index =
+    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
   }
   if (!current_dex_registers_.empty()) {
-    current_stack_map_.dex_register_map_index =
+    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
   }
@@ -275,7 +269,7 @@ void StackMapStream::FillInMethodInfo(MemoryRegion region) {
   {
     MethodInfo info(region.begin(), method_infos_.size());
     for (size_t i = 0; i < method_infos_.size(); ++i) {
-      info.SetMethodIndex(i, method_infos_[i]);
+      info.SetMethodIndex(i, method_infos_[i][0]);
     }
   }
   if (kVerifyStackMaps) {
@@ -284,23 +278,19 @@ void StackMapStream::FillInMethodInfo(MemoryRegion region) {
     const size_t count = info.NumMethodIndices();
     DCHECK_EQ(count, method_infos_.size());
     for (size_t i = 0; i < count; ++i) {
-      DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i]);
+      DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i][0]);
     }
   }
 }
 
 size_t StackMapStream::PrepareForFillIn() {
-  static_assert(sizeof(StackMapEntry) == StackMap::kCount * sizeof(uint32_t), "Layout");
-  static_assert(sizeof(InvokeInfoEntry) == InvokeInfo::kCount * sizeof(uint32_t), "Layout");
-  static_assert(sizeof(InlineInfoEntry) == InlineInfo::kCount * sizeof(uint32_t), "Layout");
-  static_assert(sizeof(DexRegisterEntry) == DexRegisterInfo::kCount * sizeof(uint32_t), "Layout");
   DCHECK_EQ(out_.size(), 0u);
 
   // Read the stack masks now. The compiler might have updated them.
   for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
     BitVector* stack_mask = lazy_stack_masks_[i];
     if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
-      stack_maps_[i].stack_mask_index =
+      stack_maps_[i][StackMap::kStackMaskIndex] =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
     }
   }
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 06868476bc..c0a41afef2 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -52,6 +52,7 @@ class StackMapStream : public ValueObject {
         lazy_stack_masks_(allocator->Adapter(kArenaAllocStackMapStream)),
         in_stack_map_(false),
         in_inline_info_(false),
+        current_stack_map_(),
         current_inline_infos_(allocator->Adapter(kArenaAllocStackMapStream)),
         current_dex_registers_(allocator->Adapter(kArenaAllocStackMapStream)),
         previous_dex_registers_(allocator->Adapter(kArenaAllocStackMapStream)),
@@ -97,70 +98,29 @@ class StackMapStream : public ValueObject {
  private:
   static constexpr uint32_t kNoValue = -1;
 
-  // The fields must be uint32_t and mirror the StackMap accessor in stack_map.h!
-  struct StackMapEntry {
-    uint32_t kind;
-    uint32_t packed_native_pc;
-    uint32_t dex_pc;
-    uint32_t register_mask_index;
-    uint32_t stack_mask_index;
-    uint32_t inline_info_index;
-    uint32_t dex_register_mask_index;
-    uint32_t dex_register_map_index;
-  };
-
-  // The fields must be uint32_t and mirror the InlineInfo accessor in stack_map.h!
-  struct InlineInfoEntry {
-    uint32_t is_last;
-    uint32_t dex_pc;
-    uint32_t method_info_index;
-    uint32_t art_method_hi;
-    uint32_t art_method_lo;
-    uint32_t num_dex_registers;
-  };
-
-  // The fields must be uint32_t and mirror the InvokeInfo accessor in stack_map.h!
-  struct InvokeInfoEntry {
-    uint32_t packed_native_pc;
-    uint32_t invoke_type;
-    uint32_t method_info_index;
-  };
-
-  // The fields must be uint32_t and mirror the DexRegisterInfo accessor in stack_map.h!
-  struct DexRegisterEntry {
-    uint32_t kind;
-    uint32_t packed_value;
-  };
-
-  // The fields must be uint32_t and mirror the RegisterMask accessor in stack_map.h!
-  struct RegisterMaskEntry {
-    uint32_t value;
-    uint32_t shift;
-  };
-
   void CreateDexRegisterMap();
 
   const InstructionSet instruction_set_;
-  BitTableBuilder<StackMapEntry> stack_maps_;
-  BitTableBuilder<RegisterMaskEntry> register_masks_;
+  BitTableBuilder<StackMap::kCount> stack_maps_;
+  BitTableBuilder<RegisterMask::kCount> register_masks_;
   BitmapTableBuilder stack_masks_;
-  BitTableBuilder<InvokeInfoEntry> invoke_infos_;
-  BitTableBuilder<InlineInfoEntry> inline_infos_;
+  BitTableBuilder<InvokeInfo::kCount> invoke_infos_;
+  BitTableBuilder<InlineInfo::kCount> inline_infos_;
   BitmapTableBuilder dex_register_masks_;
-  BitTableBuilder<uint32_t> dex_register_maps_;
-  BitTableBuilder<DexRegisterEntry> dex_register_catalog_;
+  BitTableBuilder<MaskInfo::kCount> dex_register_maps_;
+  BitTableBuilder<DexRegisterInfo::kCount> dex_register_catalog_;
   uint32_t num_dex_registers_ = 0;  // TODO: Make this const and get the value in constructor.
   ScopedArenaVector<uint8_t> out_;
 
-  BitTableBuilder<uint32_t> method_infos_;
+  BitTableBuilder<1> method_infos_;
 
   ScopedArenaVector<BitVector*> lazy_stack_masks_;
 
   // Variables which track the current state between Begin/End calls;
   bool in_stack_map_;
   bool in_inline_info_;
-  StackMapEntry current_stack_map_;
-  ScopedArenaVector<InlineInfoEntry> current_inline_infos_;
+  BitTableBuilder<StackMap::kCount>::Entry current_stack_map_;
+  ScopedArenaVector<BitTableBuilder<InlineInfo::kCount>::Entry> current_inline_infos_;
   ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
   ScopedArenaVector<DexRegisterLocation> previous_dex_registers_;
   ScopedArenaVector<uint32_t> dex_register_timestamp_;  // Stack map index of last change.
@@ -169,7 +129,7 @@ class StackMapStream : public ValueObject {
   // Temporary variables used in CreateDexRegisterMap.
   // They are here so that we can reuse the reserved memory.
   ArenaBitVector temp_dex_register_mask_;
-  ScopedArenaVector<uint32_t> temp_dex_register_map_;
+  ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo::kCount>::Entry> temp_dex_register_map_;
 
   // A set of lambda functions to be executed at the end to verify
   // the encoded data. It is generally only used in debug builds.
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 6a714e6b9d..2cc1a31ade 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -18,6 +18,7 @@
 #define ART_LIBARTBASE_BASE_BIT_TABLE_H_
 
 #include <array>
+#include <initializer_list>
 #include <numeric>
 #include <string.h>
 #include <type_traits>
@@ -184,33 +185,54 @@ static const char* const* GetBitTableColumnNames() {
 }
 
 // Helper class for encoding BitTable. It can optionally de-duplicate the inputs.
-// Type 'T' must be POD type consisting of uint32_t fields (one for each column).
-template<typename T>
+template<uint32_t kNumColumns>
 class BitTableBuilder {
  public:
-  static_assert(std::is_pod<T>::value, "Type 'T' must be POD");
-  static constexpr size_t kNumColumns = sizeof(T) / sizeof(uint32_t);
+  class Entry {
+   public:
+    Entry() {
+      std::fill_n(data_, kNumColumns, BitTable<kNumColumns>::Accessor::kNoValue);
+    }
+
+    Entry(std::initializer_list<uint32_t> values) {
+      DCHECK_EQ(values.size(), kNumColumns);
+      std::copy(values.begin(), values.end(), data_);
+    }
+
+    uint32_t& operator[](size_t column) {
+      DCHECK_LT(column, kNumColumns);
+      return data_[column];
+    }
+
+    uint32_t operator[](size_t column) const {
+      DCHECK_LT(column, kNumColumns);
+      return data_[column];
+    }
+
+   private:
+    uint32_t data_[kNumColumns];
+  };
 
   explicit BitTableBuilder(ScopedArenaAllocator* allocator)
       : rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator->Adapter(kArenaAllocBitTableBuilder)) {
   }
 
-  T& operator[](size_t row) { return rows_[row]; }
-  const T& operator[](size_t row) const { return rows_[row]; }
+  Entry& operator[](size_t row) { return rows_[row]; }
+  const Entry& operator[](size_t row) const { return rows_[row]; }
   size_t size() const { return rows_.size(); }
 
   // Append given value to the vector without de-duplication.
   // This will not add the element to the dedup map to avoid its associated costs.
-  void Add(T value) {
+  void Add(Entry value) {
     rows_.push_back(value);
   }
 
   // Append given list of values and return the index of the first value.
   // If the exact same set of values was already added, return the old index.
-  uint32_t Dedup(T* values, size_t count = 1) {
+  uint32_t Dedup(Entry* values, size_t count = 1) {
     FNVHash<MemoryRegion> hasher;
-    uint32_t hash = hasher(MemoryRegion(values, sizeof(T) * count));
+    uint32_t hash = hasher(MemoryRegion(values, sizeof(Entry) * count));
 
     // Check if we have already added identical set of values.
     auto range = dedup_.equal_range(hash);
@@ -220,8 +242,8 @@ class BitTableBuilder {
           std::equal(values,
                      values + count,
                      rows_.begin() + index,
-                     [](const T& lhs, const T& rhs) {
-                       return memcmp(&lhs, &rhs, sizeof(T)) == 0;
+                     [](const Entry& lhs, const Entry& rhs) {
+                       return memcmp(&lhs, &rhs, sizeof(Entry)) == 0;
                      })) {
         return index;
       }
@@ -234,11 +256,8 @@ class BitTableBuilder {
     return index;
   }
 
-  ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column) const {
-    DCHECK_LT(row, size());
-    DCHECK_LT(column, kNumColumns);
-    const uint32_t* data = reinterpret_cast<const uint32_t*>(&rows_[row]);
-    return data[column];
+  uint32_t Dedup(Entry value) {
+    return Dedup(&value, /* count */ 1);
   }
 
   // Calculate the column bit widths based on the current data.
@@ -247,7 +266,7 @@ class BitTableBuilder {
     std::fill_n(max_column_value, kNumColumns, 0);
     for (uint32_t r = 0; r < size(); r++) {
       for (uint32_t c = 0; c < kNumColumns; c++) {
-        max_column_value[c] |= Get(r, c) - BitTable<kNumColumns>::kValueBias;
+        max_column_value[c] |= rows_[r][c] - BitTable<kNumColumns>::kValueBias;
       }
     }
     for (uint32_t c = 0; c < kNumColumns; c++) {
@@ -276,7 +295,7 @@ class BitTableBuilder {
     BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
     for (uint32_t r = 0; r < size(); r++) {
       for (uint32_t c = 0; c < kNumColumns; c++) {
-        region.StoreBitsAndAdvance(bit_offset, Get(r, c) - bias, column_bits[c]);
+        region.StoreBitsAndAdvance(bit_offset, rows_[r][c] - bias, column_bits[c]);
       }
     }
   }
@@ -292,14 +311,14 @@ class BitTableBuilder {
     }
     for (uint32_t r = 0; r < size(); r++) {
       for (uint32_t c = 0; c < kNumColumns; c++) {
-        DCHECK_EQ(Get(r, c), table.Get(r, c)) << " (" << r << ", " << c << ")";
+        DCHECK_EQ(rows_[r][c], table.Get(r, c)) << " (" << r << ", " << c << ")";
       }
     }
   }
 
 protected:
-  ScopedArenaDeque<T> rows_;
+  ScopedArenaDeque<Entry> rows_;
   ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
 };
diff --git a/libartbase/base/bit_table_test.cc b/libartbase/base/bit_table_test.cc
index 8abf0da9d9..969940fe39 100644
--- a/libartbase/base/bit_table_test.cc
+++ b/libartbase/base/bit_table_test.cc
@@ -50,7 +50,7 @@ TEST(BitTableTest, TestEmptyTable) {
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<uint32_t> builder(&allocator);
+  BitTableBuilder<1> builder(&allocator);
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
@@ -67,11 +67,11 @@ TEST(BitTableTest, TestSingleColumnTable) {
   constexpr uint32_t kNoValue = -1;
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<uint32_t> builder(&allocator);
-  builder.Add(42u);
-  builder.Add(kNoValue);
-  builder.Add(1000u);
-  builder.Add(kNoValue);
+  BitTableBuilder<1> builder(&allocator);
+  builder.Add({42u});
+  builder.Add({kNoValue});
+  builder.Add({1000u});
+  builder.Add({kNoValue});
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
@@ -93,8 +93,8 @@ TEST(BitTableTest, TestUnalignedTable) {
   for (size_t start_bit_offset = 0; start_bit_offset <= 32; start_bit_offset++) {
     std::vector<uint8_t> buffer;
     size_t encode_bit_offset = start_bit_offset;
-    BitTableBuilder<uint32_t> builder(&allocator);
-    builder.Add(42u);
+    BitTableBuilder<1> builder(&allocator);
+    builder.Add({42u});
     builder.Encode(&buffer, &encode_bit_offset);
 
     size_t decode_bit_offset = start_bit_offset;
@@ -113,15 +113,9 @@ TEST(BitTableTest, TestBigTable) {
   constexpr uint32_t kNoValue = -1;
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  struct RowData {
-    uint32_t a;
-    uint32_t b;
-    uint32_t c;
-    uint32_t d;
-  };
-  BitTableBuilder<RowData> builder(&allocator);
-  builder.Add(RowData{42u, kNoValue, 0u, static_cast<uint32_t>(-2)});
-  builder.Add(RowData{62u, kNoValue, 63u, static_cast<uint32_t>(-3)});
+  BitTableBuilder<4> builder(&allocator);
+  builder.Add({42u, kNoValue, 0u, static_cast<uint32_t>(-2)});
+  builder.Add({62u, kNoValue, 63u, static_cast<uint32_t>(-3)});
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
@@ -147,13 +141,9 @@ TEST(BitTableTest, TestDedup) {
   ArenaStack arena_stack(&pool);
   ScopedArenaAllocator allocator(&arena_stack);
 
-  struct RowData {
-    uint32_t a;
-    uint32_t b;
-  };
-  BitTableBuilder<RowData> builder(&allocator);
-  RowData value0{1, 2};
-  RowData value1{3, 4};
+  BitTableBuilder<2> builder(&allocator);
+  BitTableBuilder<2>::Entry value0{1, 2};
+  BitTableBuilder<2>::Entry value1{3, 4};
   EXPECT_EQ(0u, builder.Dedup(&value0));
   EXPECT_EQ(1u, builder.Dedup(&value1));
   EXPECT_EQ(0u, builder.Dedup(&value0));
@@ -197,16 +187,12 @@ TEST(BitTableTest, TestCollisions) {
   ScopedArenaAllocator allocator(&arena_stack);
   FNVHash<MemoryRegion> hasher;
 
-  struct RowData {
-    uint32_t a;
-    uint32_t b;
-  };
-  RowData value0{56948505, 0};
-  RowData value1{67108869, 0};
+  BitTableBuilder<2>::Entry value0{56948505, 0};
+  BitTableBuilder<2>::Entry value1{67108869, 0};
 
-  BitTableBuilder<RowData> builder(&allocator);
-  EXPECT_EQ(hasher(MemoryRegion(&value0, sizeof(RowData))),
-            hasher(MemoryRegion(&value1, sizeof(RowData))));
+  BitTableBuilder<2> builder(&allocator);
+  EXPECT_EQ(hasher(MemoryRegion(&value0, sizeof(value0))),
+            hasher(MemoryRegion(&value1, sizeof(value1))));
   EXPECT_EQ(0u, builder.Dedup(&value0));
   EXPECT_EQ(1u, builder.Dedup(&value1));
   EXPECT_EQ(0u, builder.Dedup(&value0));
@@ -214,12 +200,12 @@ TEST(BitTableTest, TestCollisions) {
   EXPECT_EQ(2u, builder.size());
 
   BitmapTableBuilder builder2(&allocator);
-  EXPECT_EQ(hasher(MemoryRegion(&value0, BitsToBytesRoundUp(MinimumBitsToStore(value0.a)))),
-            hasher(MemoryRegion(&value1, BitsToBytesRoundUp(MinimumBitsToStore(value1.a)))));
-  EXPECT_EQ(0u, builder2.Dedup(&value0.a, MinimumBitsToStore(value0.a)));
-  EXPECT_EQ(1u, builder2.Dedup(&value1.a, MinimumBitsToStore(value1.a)));
-  EXPECT_EQ(0u, builder2.Dedup(&value0.a, MinimumBitsToStore(value0.a)));
-  EXPECT_EQ(1u, builder2.Dedup(&value1.a, MinimumBitsToStore(value1.a)));
+  EXPECT_EQ(hasher(MemoryRegion(&value0, BitsToBytesRoundUp(MinimumBitsToStore(value0[0])))),
+            hasher(MemoryRegion(&value1, BitsToBytesRoundUp(MinimumBitsToStore(value1[0])))));
+  EXPECT_EQ(0u, builder2.Dedup(&value0[0], MinimumBitsToStore(value0[0])));
+  EXPECT_EQ(1u, builder2.Dedup(&value1[0], MinimumBitsToStore(value1[0])));
+  EXPECT_EQ(0u, builder2.Dedup(&value0[0], MinimumBitsToStore(value0[0])));
+  EXPECT_EQ(1u, builder2.Dedup(&value1[0], MinimumBitsToStore(value1[0])));
   EXPECT_EQ(2u, builder2.size());
 }
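
The Entry initializer-list constructor also lets callers spell rows inline, as
the updated tests do. A short usage sketch (allocator set up as in the tests
above):

    BitTableBuilder<2> builder(&allocator);
    builder.Add({1u, 2u});                   // append row 0 without de-duplication
    BitTableBuilder<2>::Entry row{3u, 4u};
    EXPECT_EQ(1u, builder.Dedup(&row));      // new row is appended at index 1
    EXPECT_EQ(1u, builder.Dedup({3u, 4u}));  // identical row returns the same index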