Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 16 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 17 | #include "stack_map_stream.h" |
| 18 | |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 19 | #include <memory> |
| 20 | |
Andreas Gampe | 90b936d | 2017-01-31 08:58:55 -0800 | [diff] [blame] | 21 | #include "art_method-inl.h" |
David Srbecky | 45aa598 | 2016-03-18 02:15:09 +0000 | [diff] [blame] | 22 | #include "base/stl_util.h" |
David Sehr | 9e734c7 | 2018-01-04 17:56:19 -0800 | [diff] [blame] | 23 | #include "dex/dex_file_types.h" |
Nicolas Geoffray | fbdfa6d | 2017-02-03 10:43:13 +0000 | [diff] [blame] | 24 | #include "optimizing/optimizing_compiler.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 25 | #include "runtime.h" |
| 26 | #include "scoped_thread_state_change-inl.h" |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 27 | #include "stack_map.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 28 | |
Vladimir Marko | 0a51605 | 2019-10-14 13:00:44 +0000 | [diff] [blame] | 29 | namespace art { |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 30 | |
// When true, every piece of data written to the stream is re-read from the
// encoded CodeInfo at Encode() time and cross-checked (see dchecks_ below).
// Enabled only in debug builds since the verification is expensive.
constexpr static bool kVerifyStackMaps = kIsDebugBuild;
| 32 | |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 33 | uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 34 | return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_); |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 35 | } |
| 36 | |
| 37 | void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 38 | stack_maps_[i][StackMap::kPackedNativePc] = |
| 39 | StackMap::PackNativePc(native_pc_offset, instruction_set_); |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 40 | } |
| 41 | |
// Start streaming stack map data for one method. Records the frame layout
// (frame size, spill masks, number of dex registers) that will be emitted in
// the CodeInfo header by Encode(). Must be paired with EndMethod().
void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
                                 size_t core_spill_mask,
                                 size_t fp_spill_mask,
                                 uint32_t num_dex_registers,
                                 bool baseline) {
  DCHECK(!in_method_) << "Mismatched Begin/End calls";
  in_method_ = true;
  // packed_frame_size_ doubles as a "was BeginMethod called" marker; it is
  // only ever set here.
  DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";

  // The frame size is stored divided by kStackAlignment to save bits.
  DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
  packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
  core_spill_mask_ = core_spill_mask;
  fp_spill_mask_ = fp_spill_mask;
  num_dex_registers_ = num_dex_registers;
  baseline_ = baseline;

  if (kVerifyStackMaps) {
    // Defer a check (run against the decoded CodeInfo at Encode() time) that
    // the header round-trips the values recorded above. "[=]" snapshots the
    // current argument values into the closure.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      DCHECK_EQ(code_info.packed_frame_size_, frame_size_in_bytes / kStackAlignment);
      DCHECK_EQ(code_info.core_spill_mask_, core_spill_mask);
      DCHECK_EQ(code_info.fp_spill_mask_, fp_spill_mask);
      DCHECK_EQ(code_info.number_of_dex_registers_, num_dex_registers);
    });
  }
}
| 67 | |
Nicolas Geoffray | bf5f0f3 | 2019-03-05 15:41:50 +0000 | [diff] [blame] | 68 | void StackMapStream::EndMethod() { |
David Srbecky | f6ba5b3 | 2018-06-23 22:05:49 +0100 | [diff] [blame] | 69 | DCHECK(in_method_) << "Mismatched Begin/End calls"; |
| 70 | in_method_ = false; |
David Srbecky | e7a9194 | 2018-08-01 17:23:53 +0100 | [diff] [blame] | 71 | |
| 72 | // Read the stack masks now. The compiler might have updated them. |
| 73 | for (size_t i = 0; i < lazy_stack_masks_.size(); i++) { |
| 74 | BitVector* stack_mask = lazy_stack_masks_[i]; |
| 75 | if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) { |
| 76 | stack_maps_[i][StackMap::kStackMaskIndex] = |
| 77 | stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits()); |
| 78 | } |
| 79 | } |
David Srbecky | f6ba5b3 | 2018-06-23 22:05:49 +0100 | [diff] [blame] | 80 | } |
| 81 | |
// Start a new stack map entry at the given dex pc / native pc. Register and
// stack masks describe the live GC roots at this point; `kind` distinguishes
// normal, OSR and catch-handler maps. Must be paired with EndStackMapEntry().
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        StackMap::Kind kind,
                                        bool needs_vreg_info) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  // Reset the staging entry; it is committed to stack_maps_ in EndStackMapEntry().
  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
    // Store the register mask shifted right so that the common trailing zeros
    // do not cost encoding bits; the shift amount is stored alongside.
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read later (in EndMethod()),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  // When no vreg info is needed (e.g. for maps that will never be used for
  // deopt), expect zero dex registers so none are recorded.
  expected_num_dex_registers_ = needs_vreg_info ? num_dex_registers_ : 0u;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create lambda method, which will be executed at the very end to verify data.
    // Parameters and local variables will be captured(stored) by the lambda "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Check that the stack map can be found again through the usual lookup
      // entry points (by native pc for normal/OSR maps, by dex pc for catch).
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      // Check that every recorded field decodes back to its original value.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      // The encoded mask may be wider than the input (padding); every bit that
      // is set must match, and extra bits must be zero.
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}
| 148 | |
// Finish the current stack map entry: commit the accumulated inline infos and
// dex register locations, then append the staged entry to the stack map table.
void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;

  // Generate index into the InlineInfo table.
  size_t inlining_depth = current_inline_infos_.size();
  if (!current_inline_infos_.empty()) {
    // Mark the innermost frame as the last one so decoders know where the
    // inline chain for this stack map ends inside the shared table.
    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
    current_stack_map_[StackMap::kInlineInfoIndex] =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  size_t num_dex_registers = current_dex_registers_.size();
  if (!current_dex_registers_.empty()) {
    // All registers promised by BeginStackMapEntry/BeginInlineInfoEntry must
    // have been recorded by now.
    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
    CreateDexRegisterMap();
  }

  stack_maps_.Add(current_stack_map_);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size() - 1;
    // Defer a check that the presence flags and inline depth decode correctly.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
    });
  }
}
| 180 | |
// Start an inline info entry (one inlined frame) inside the current stack map.
// The inlined method is encoded either as a raw ArtMethod pointer or as an
// index into the method-info table, depending on EncodeArtMethodInInlineInfo().
// Dex registers of inlined frames are appended after the outer frame's
// registers. Must be paired with EndInlineInfoEntry().
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  // The caller's registers must all be recorded before opening a nested frame.
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  // kIsLast is provisional; EndStackMapEntry() rewrites it to kLast for the
  // final entry of the chain.
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  // Stores the cumulative register count (outer frame + all inline frames so
  // far), which lets decoders locate each frame's register slice.
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    // Encode the ArtMethod pointer directly, split into two 32-bit halves.
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    // Encode only the dex method index; this is valid only if the method lives
    // in the same dex file as the outer method (checked in debug builds).
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndex();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    // Defer a check that this inline frame decodes back to the same method
    // and dex pc, via whichever encoding was chosen above.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(code_info.GetMethodIndexOf(inline_info), method->GetDexMethodIndex());
      }
    });
  }
}
| 226 | |
| 227 | void StackMapStream::EndInlineInfoEntry() { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 228 | DCHECK(in_inline_info_) << "Mismatched Begin/End calls"; |
| 229 | in_inline_info_ = false; |
| 230 | DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size()); |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 231 | } |
| 232 | |
// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
// Only registers that changed since the last stack map (or whose last change
// is too far back to search) are re-emitted; unchanged registers are found by
// decoders via backward search through preceding maps.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is difference between this index and the index of last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    // Re-emit if the location changed, or if the previous emission is further
    // back than decoders are willing to search.
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      // Dead registers get kNoValue instead of a catalogue slot.
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // We need to make copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    // Defer a check that decoding (outer map first, then each inline frame's
    // map) yields exactly the concatenated register list recorded here.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
        DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
        for (DexRegisterLocation reg : map) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}
| 298 | |
// Serialize all accumulated data into the final CodeInfo byte stream:
// a varint header followed by the non-empty bit-tables. Also decodes the
// result back to sanity-check it, and (in debug builds) runs every deferred
// verification lambda recorded while streaming.
ScopedArenaVector<uint8_t> StackMapStream::Encode() {
  DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
  DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";

  uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
  flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
  // One bit per table: empty tables are omitted from the stream entirely.
  uint32_t bit_table_flags = 0;
  ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
    if (bit_table->size() != 0) {  // Record which bit-tables are stored.
      bit_table_flags |= 1 << i;
    }
  });

  // Write the header fields as interleaved varints, then the tables.
  ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
  out.WriteInterleavedVarints(std::array<uint32_t, CodeInfo::kNumHeaders>{
    flags,
    packed_frame_size_,
    core_spill_mask_,
    fp_spill_mask_,
    num_dex_registers_,
    bit_table_flags,
  });
  ForEachBitTable([&out](size_t, auto bit_table) {
    if (bit_table->size() != 0) {  // Skip empty bit-tables.
      bit_table->Encode(out);
    }
  });

  // Verify that we can load the CodeInfo and check some essentials.
  size_t number_of_read_bits;
  CodeInfo code_info(buffer.data(), &number_of_read_bits);
  CHECK_EQ(number_of_read_bits, out.NumberOfWrittenBits());
  CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());

  // Verify all written data (usually only in debug builds).
  if (kVerifyStackMaps) {
    for (const auto& dcheck : dchecks_) {
      dcheck(code_info);
    }
  }

  return buffer;
}
| 343 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 344 | } // namespace art |