blob: 1a368ed34713b9fa25fd07f8103bfd142fef5f3e [file] [log] [blame]
Calin Juravlec416d332015-04-23 16:01:43 +01001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000016
Calin Juravlec416d332015-04-23 16:01:43 +010017#include "stack_map_stream.h"
18
David Srbecky049d6812018-05-18 14:46:49 +010019#include <memory>
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +000020#include <vector>
David Srbecky049d6812018-05-18 14:46:49 +010021
Andreas Gampe90b936d2017-01-31 08:58:55 -080022#include "art_method-inl.h"
Santiago Aboy Solanesab1d5592022-06-24 11:16:35 +010023#include "base/globals.h"
David Srbecky45aa5982016-03-18 02:15:09 +000024#include "base/stl_util.h"
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +000025#include "class_linker.h"
26#include "dex/dex_file.h"
David Sehr9e734c72018-01-04 17:56:19 -080027#include "dex/dex_file_types.h"
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +010028#include "driver/compiler_options.h"
29#include "optimizing/code_generator.h"
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +000030#include "optimizing/nodes.h"
Nicolas Geoffrayfbdfa6d2017-02-03 10:43:13 +000031#include "optimizing/optimizing_compiler.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000032#include "runtime.h"
33#include "scoped_thread_state_change-inl.h"
David Srbecky71ec1cc2018-05-18 15:57:25 +010034#include "stack_map.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000035
VladimĂ­r Marko434d9682022-11-04 14:04:17 +000036namespace art HIDDEN {
Calin Juravlec416d332015-04-23 16:01:43 +010037
David Srbecky049d6812018-05-18 14:46:49 +010038constexpr static bool kVerifyStackMaps = kIsDebugBuild;
39
// Returns the native PC offset recorded for stack map |i|, decoding the
// packed representation stored in the table via the current instruction set.
David Srbeckyd02b23f2018-05-29 23:27:22 +010040uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
David Srbeckyf325e282018-06-13 15:02:32 +010041 return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
David Srbeckyd02b23f2018-05-29 23:27:22 +010042}
43
// Overwrites the native PC offset of stack map |i|, re-packing the new value
// for the current instruction set (inverse of GetStackMapNativePcOffset).
44void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
David Srbeckyf325e282018-06-13 15:02:32 +010045 stack_maps_[i][StackMap::kPackedNativePc] =
 46 StackMap::PackNativePc(native_pc_offset, instruction_set_);
David Srbeckyd02b23f2018-05-29 23:27:22 +010047}
48
// Begins collection of stack-map data for one compiled method; must be paired
// with EndMethod(). Records the frame layout (frame size must be a multiple of
// kStackAlignment and is stored divided by it), the core/FP spill masks, the
// number of dex registers, and the baseline/debuggable compilation flags.
// In verifying builds, queues a lambda that re-checks these values against the
// decoded CodeInfo once Encode() has run.
David Srbeckyf6ba5b32018-06-23 22:05:49 +010049void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
 50 size_t core_spill_mask,
 51 size_t fp_spill_mask,
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +000052 uint32_t num_dex_registers,
Mythri Alled9e83772022-07-14 09:38:49 +000053 bool baseline,
 54 bool debuggable) {
David Srbeckyf6ba5b32018-06-23 22:05:49 +010055 DCHECK(!in_method_) << "Mismatched Begin/End calls";
 56 in_method_ = true;
  // packed_frame_size_ doubles as a "BeginMethod already ran" marker.
David Srbecky3aaaa212018-07-30 16:46:53 +010057 DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";
David Srbeckyf6ba5b32018-06-23 22:05:49 +010058
David Srbecky3aaaa212018-07-30 16:46:53 +010059 DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
 60 packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
David Srbeckyf6ba5b32018-06-23 22:05:49 +010061 core_spill_mask_ = core_spill_mask;
 62 fp_spill_mask_ = fp_spill_mask;
 63 num_dex_registers_ = num_dex_registers;
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +000064 baseline_ = baseline;
Mythri Alled9e83772022-07-14 09:38:49 +000065 debuggable_ = debuggable;
David Srbecky67ba8722019-05-23 15:32:18 +010066
  // Deferred verification: captured by value, executed at the end of Encode().
 67 if (kVerifyStackMaps) {
 68 dchecks_.emplace_back([=](const CodeInfo& code_info) {
 69 DCHECK_EQ(code_info.packed_frame_size_, frame_size_in_bytes / kStackAlignment);
 70 DCHECK_EQ(code_info.core_spill_mask_, core_spill_mask);
 71 DCHECK_EQ(code_info.fp_spill_mask_, fp_spill_mask);
 72 DCHECK_EQ(code_info.number_of_dex_registers_, num_dex_registers);
 73 });
 74 }
David Srbeckyf6ba5b32018-06-23 22:05:49 +010075}
76
// Ends collection for the current method. Records the final native code size
// and only now reads the stack-mask BitVectors that BeginStackMapEntry stored
// as raw pointers, deduplicating each into the stack_masks_ bit table.
David Srbecky17b4d2b2021-03-02 18:14:31 +000077void StackMapStream::EndMethod(size_t code_size) {
David Srbeckyf6ba5b32018-06-23 22:05:49 +010078 DCHECK(in_method_) << "Mismatched Begin/End calls";
 79 in_method_ = false;
David Srbecky17b4d2b2021-03-02 18:14:31 +000080 code_size_ = code_size;
David Srbeckye7a91942018-08-01 17:23:53 +010081
 82 // Read the stack masks now. The compiler might have updated them.
 83 for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
 84 BitVector* stack_mask = lazy_stack_masks_[i];
 85 if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
 86 stack_maps_[i][StackMap::kStackMaskIndex] =
 87 stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
 88 }
 89 }
David Srbecky17b4d2b2021-03-02 18:14:31 +000090
  // Sanity check: every recorded native PC must lie within the method's code.
 91 if (kIsDebugBuild) {
 92 uint32_t packed_code_size = StackMap::PackNativePc(code_size, instruction_set_);
 93 for (size_t i = 0; i < stack_maps_.size(); i++) {
 94 DCHECK_LE(stack_maps_[i][StackMap::kPackedNativePc], packed_code_size);
 95 }
 96 }
 97
  // Deferred verification: the encoded CodeInfo must report the same code size.
 98 if (kVerifyStackMaps) {
 99 dchecks_.emplace_back([=](const CodeInfo& code_info) {
 100 CHECK_EQ(code_info.code_size_, code_size);
 101 });
 102 }
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100103}
104
// Begins one stack map entry; must be paired with EndStackMapEntry().
// Records the (dex pc, packed native pc, kind) triple, dedups a shifted
// register mask into its bit table, and resets the per-entry inline-info and
// dex-register accumulators. |needs_vreg_info| selects whether dex register
// values are expected for this entry; |dex_pc_list_for_catch_verification| is
// debug-only data used to verify Catch entries after encoding.
Santiago Aboy Solanesab1d5592022-06-24 11:16:35 +0100105void StackMapStream::BeginStackMapEntry(
 106 uint32_t dex_pc,
 107 uint32_t native_pc_offset,
 108 uint32_t register_mask,
 109 BitVector* stack_mask,
 110 StackMap::Kind kind,
 111 bool needs_vreg_info,
 112 const std::vector<uint32_t>& dex_pc_list_for_catch_verification) {
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100113 DCHECK(in_method_) << "Call BeginMethod first";
David Srbecky71ec1cc2018-05-18 15:57:25 +0100114 DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
 115 in_stack_map_ = true;
 116
Santiago Aboy Solanesab1d5592022-06-24 11:16:35 +0100117 DCHECK_IMPLIES(!dex_pc_list_for_catch_verification.empty(), kind == StackMap::Kind::Catch);
 118 DCHECK_IMPLIES(!dex_pc_list_for_catch_verification.empty(), kIsDebugBuild);
 119
David Srbeckycf7833e2018-06-14 16:45:22 +0100120 current_stack_map_ = BitTableBuilder<StackMap>::Entry();
David Srbeckyf325e282018-06-13 15:02:32 +0100121 current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
 122 current_stack_map_[StackMap::kPackedNativePc] =
 123 StackMap::PackNativePc(native_pc_offset, instruction_set_);
 124 current_stack_map_[StackMap::kDexPc] = dex_pc;
David Srbecky0b4e5a32018-06-11 16:25:29 +0100125 if (stack_maps_.size() > 0) {
 126 // Check that non-catch stack maps are sorted by pc.
 127 // Catch stack maps are at the end and may be unordered.
 128 if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
 129 DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
 130 } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
 131 DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
 132 current_stack_map_[StackMap::kPackedNativePc]);
 133 }
 134 }
  // Store the register mask normalized as (value >> shift, shift) so that
  // sparse high-register masks dedup compactly.
David Srbecky71ec1cc2018-05-18 15:57:25 +0100135 if (register_mask != 0) {
 136 uint32_t shift = LeastSignificantBit(register_mask);
David Srbeckycf7833e2018-06-14 16:45:22 +0100137 BitTableBuilder<RegisterMask>::Entry entry;
David Srbeckyf325e282018-06-13 15:02:32 +0100138 entry[RegisterMask::kValue] = register_mask >> shift;
 139 entry[RegisterMask::kShift] = shift;
 140 current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
Vladimir Marko174b2e22017-10-12 13:34:49 +0100141 }
David Srbecky71ec1cc2018-05-18 15:57:25 +0100142 // The compiler assumes the bit vector will be read during PrepareForFillIn(),
 143 // and it might modify the data before that. Therefore, just store the pointer.
 144 // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
 145 lazy_stack_masks_.push_back(stack_mask);
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100146 current_inline_infos_.clear();
David Srbecky71ec1cc2018-05-18 15:57:25 +0100147 current_dex_registers_.clear();
Artem Serov2808be82018-12-20 19:15:11 +0000148 expected_num_dex_registers_ = needs_vreg_info ? num_dex_registers_ : 0u;
David Srbecky71ec1cc2018-05-18 15:57:25 +0100149
David Srbecky049d6812018-05-18 14:46:49 +0100150 if (kVerifyStackMaps) {
 151 size_t stack_map_index = stack_maps_.size();
 152 // Create lambda method, which will be executed at the very end to verify data.
 153 // Parameters and local variables will be captured(stored) by the lambda "[=]".
 154 dchecks_.emplace_back([=](const CodeInfo& code_info) {
   // Default/OSR entries must be findable by native PC; Catch entries by dex PC.
David Srbecky50fac062018-06-13 18:55:35 +0100155 if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
 156 StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
 157 instruction_set_);
 158 CHECK_EQ(stack_map.Row(), stack_map_index);
 159 } else if (kind == StackMap::Kind::Catch) {
Santiago Aboy Solanesab1d5592022-06-24 11:16:35 +0100160 StackMap stack_map = code_info.GetCatchStackMapForDexPc(
 161 ArrayRef<const uint32_t>(dex_pc_list_for_catch_verification));
David Srbecky50fac062018-06-13 18:55:35 +0100162 CHECK_EQ(stack_map.Row(), stack_map_index);
 163 }
David Srbecky049d6812018-05-18 14:46:49 +0100164 StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
 165 CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
David Srbecky50fac062018-06-13 18:55:35 +0100166 CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
David Srbecky049d6812018-05-18 14:46:49 +0100167 CHECK_EQ(stack_map.GetDexPc(), dex_pc);
 168 CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
   // The stored mask may be longer than the input; extra bits must be zero.
 169 BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
 170 CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
 171 for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
 172 CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
 173 }
David Srbecky049d6812018-05-18 14:46:49 +0100174 });
David Srbecky71ec1cc2018-05-18 15:57:25 +0100175 }
Calin Juravlec416d332015-04-23 16:01:43 +0100176}
177
// Closes the stack map entry opened by BeginStackMapEntry: marks the deepest
// inline frame as last and dedups the inline-info run, emits the
// delta-compressed dex register map, then appends the finished row to the
// stack map table.
Calin Juravle4f46ac52015-04-23 18:47:21 +0100178void StackMapStream::EndStackMapEntry() {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100179 DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
 180 in_stack_map_ = false;
David Srbecky71ec1cc2018-05-18 15:57:25 +0100181
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100182 // Generate index into the InlineInfo table.
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100183 size_t inlining_depth = current_inline_infos_.size();
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100184 if (!current_inline_infos_.empty()) {
David Srbeckyf325e282018-06-13 15:02:32 +0100185 current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
 186 current_stack_map_[StackMap::kInlineInfoIndex] =
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100187 inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
David Srbecky71ec1cc2018-05-18 15:57:25 +0100188 }
 189
David Srbecky6de88332018-06-03 12:00:11 +0100190 // Generate delta-compressed dex register map.
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100191 size_t num_dex_registers = current_dex_registers_.size();
 192 if (!current_dex_registers_.empty()) {
  // All expected registers (outer frame plus every inline frame) must be in.
 193 DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
 194 CreateDexRegisterMap();
 195 }
David Srbecky6de88332018-06-03 12:00:11 +0100196
David Srbecky71ec1cc2018-05-18 15:57:25 +0100197 stack_maps_.Add(current_stack_map_);
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100198
  // Deferred verification of the presence flags and inline depth.
 199 if (kVerifyStackMaps) {
 200 size_t stack_map_index = stack_maps_.size() - 1;
 201 dchecks_.emplace_back([=](const CodeInfo& code_info) {
 202 StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
 203 CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
 204 CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
David Srbecky93bd3612018-07-02 19:30:18 +0100205 CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100206 });
 207 }
Calin Juravle4f46ac52015-04-23 18:47:21 +0100208}
209
// Begins one inlined frame inside the current stack map; must be paired with
// EndInlineInfoEntry(). The frame's dex registers are appended after the
// caller's, so kNumberOfDexRegisters records the cumulative count. The callee
// is encoded either as a raw ArtMethod pointer (hi/lo 32-bit halves) or as a
// deduped MethodInfo row holding the dex method index plus, when the callee's
// dex file differs from |outer_dex_file|, whether that file is on the boot
// class path (kKindBCP) or among the oat-file dex files, and its index there.
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000210void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100211 uint32_t dex_pc,
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000212 uint32_t num_dex_registers,
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100213 const DexFile* outer_dex_file,
 214 const CodeGenerator* codegen) {
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100215 DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
David Srbecky71ec1cc2018-05-18 15:57:25 +0100216 DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
 217 in_inline_info_ = true;
 218 DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
 219
David Srbecky6de88332018-06-03 12:00:11 +0100220 expected_num_dex_registers_ += num_dex_registers;
 221
David Srbeckycf7833e2018-06-14 16:45:22 +0100222 BitTableBuilder<InlineInfo>::Entry entry;
David Srbeckyf325e282018-06-13 15:02:32 +0100223 entry[InlineInfo::kIsLast] = InlineInfo::kMore;
 224 entry[InlineInfo::kDexPc] = dex_pc;
 225 entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000226 if (EncodeArtMethodInInlineInfo(method)) {
David Srbeckyf325e282018-06-13 15:02:32 +0100227 entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
 228 entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000229 } else {
  // Defaults: same dex file as the outer method, not on the boot class path.
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100230 uint32_t is_in_bootclasspath = MethodInfo::kKindNonBCP;
 231 uint32_t dexfile_index = MethodInfo::kSameDexFile;
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000232 if (dex_pc != static_cast<uint32_t>(-1)) {
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000233 ScopedObjectAccess soa(Thread::Current());
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000234 const DexFile* dex_file = method->GetDexFile();
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100235 if (!IsSameDexFile(*outer_dex_file, *dex_file)) {
Santiago Aboy Solanesf4bd5de2022-03-23 08:25:33 +0000236 if (method->GetDeclaringClass()->IsBootStrapClassLoaded()) {
     // Boot-classpath callee: store its position in the boot class path.
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100237 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
 238 const std::vector<const DexFile*>& boot_class_path = class_linker->GetBootClassPath();
 239 auto it = std::find_if(
 240 boot_class_path.begin(), boot_class_path.end(), [dex_file](const DexFile* df) {
 241 return IsSameDexFile(*df, *dex_file);
 242 });
 243 is_in_bootclasspath = MethodInfo::kKindBCP;
 244 dexfile_index = std::distance(boot_class_path.begin(), it);
 245 } else {
     // Otherwise: store its position among the dex files being compiled.
 246 const std::vector<const DexFile*>& dex_files =
 247 codegen->GetCompilerOptions().GetDexFilesForOatFile();
 248 auto it = std::find_if(dex_files.begin(), dex_files.end(), [dex_file](const DexFile* df) {
 249 return IsSameDexFile(*df, *dex_file);
 250 });
 251 // No need to set is_in_bootclasspath since the default value works.
 252 dexfile_index = std::distance(dex_files.begin(), it);
 253 }
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000254 }
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000255 }
Vladimir Markoc945e0d2018-07-18 17:26:45 +0100256 uint32_t dex_method_index = method->GetDexMethodIndex();
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000257 entry[InlineInfo::kMethodInfoIndex] =
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100258 method_infos_.Dedup({dex_method_index, is_in_bootclasspath, dexfile_index});
Nicolas Geoffray5d37c152017-01-12 13:25:19 +0000259 }
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100260 current_inline_infos_.push_back(entry);
David Srbecky71ec1cc2018-05-18 15:57:25 +0100261
  // Deferred verification: mirrors the encoding choices made above.
David Srbecky049d6812018-05-18 14:46:49 +0100262 if (kVerifyStackMaps) {
 263 size_t stack_map_index = stack_maps_.size();
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100264 size_t depth = current_inline_infos_.size() - 1;
David Srbecky049d6812018-05-18 14:46:49 +0100265 dchecks_.emplace_back([=](const CodeInfo& code_info) {
 266 StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
David Srbecky93bd3612018-07-02 19:30:18 +0100267 InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
David Srbecky6e69e522018-06-03 12:00:14 +0100268 CHECK_EQ(inline_info.GetDexPc(), dex_pc);
David Srbecky049d6812018-05-18 14:46:49 +0100269 bool encode_art_method = EncodeArtMethodInInlineInfo(method);
David Srbecky6e69e522018-06-03 12:00:14 +0100270 CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
David Srbecky049d6812018-05-18 14:46:49 +0100271 if (encode_art_method) {
David Srbecky6e69e522018-06-03 12:00:14 +0100272 CHECK_EQ(inline_info.GetArtMethod(), method);
David Srbecky049d6812018-05-18 14:46:49 +0100273 } else {
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000274 MethodInfo method_info = code_info.GetMethodInfoOf(inline_info);
 275 CHECK_EQ(method_info.GetMethodIndex(), method->GetDexMethodIndex());
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100276 CHECK(method_info.GetDexFileIndexKind() == MethodInfo::kKindNonBCP ||
 277 method_info.GetDexFileIndexKind() == MethodInfo::kKindBCP);
 278 ScopedObjectAccess soa(Thread::Current());
 279 if (inline_info.GetDexPc() != static_cast<uint32_t>(-1) &&
 280 !IsSameDexFile(*outer_dex_file, *method->GetDexFile())) {
Santiago Aboy Solanesf4bd5de2022-03-23 08:25:33 +0000281 if (method->GetDeclaringClass()->IsBootStrapClassLoaded()) {
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100282 CHECK_EQ(method_info.GetDexFileIndexKind(), MethodInfo::kKindBCP);
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000283 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
 284 const std::vector<const DexFile*>& boot_class_path = class_linker->GetBootClassPath();
 285 DCHECK_LT(method_info.GetDexFileIndex(), boot_class_path.size());
 286 CHECK(IsSameDexFile(*boot_class_path[method_info.GetDexFileIndex()],
 287 *method->GetDexFile()));
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +0100288 } else {
 289 CHECK_EQ(method_info.GetDexFileIndexKind(), MethodInfo::kKindNonBCP);
 290 const std::vector<const DexFile*>& dex_files =
 291 codegen->GetCompilerOptions().GetDexFilesForOatFile();
 292 DCHECK_LT(method_info.GetDexFileIndex(), dex_files.size());
 293 CHECK(IsSameDexFile(*dex_files[method_info.GetDexFileIndex()], *method->GetDexFile()));
Santiago Aboy Solanese43aa3f2021-11-01 09:02:09 +0000294 }
 295 }
David Srbecky049d6812018-05-18 14:46:49 +0100296 }
David Srbecky049d6812018-05-18 14:46:49 +0100297 });
David Srbecky71ec1cc2018-05-18 15:57:25 +0100298 }
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100299}
300
// Closes the inline frame opened by BeginInlineInfoEntry. All of the frame's
// dex registers must have been pushed by now (checked against the running
// expected count).
301void StackMapStream::EndInlineInfoEntry() {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100302 DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
 303 in_inline_info_ = false;
 304 DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
Calin Juravlec416d332015-04-23 16:01:43 +0100305}
306
David Srbecky6de88332018-06-03 12:00:11 +0100307// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
 308// All dex registers for a stack map are concatenated - inlined registers are just appended.
David Srbecky71ec1cc2018-05-18 15:57:25 +0100309void StackMapStream::CreateDexRegisterMap() {
David Srbecky6de88332018-06-03 12:00:11 +0100310 // These are fields rather than local variables so that we can reuse the reserved memory.
David Srbecky71ec1cc2018-05-18 15:57:25 +0100311 temp_dex_register_mask_.ClearAllBits();
 312 temp_dex_register_map_.clear();
David Srbecky6de88332018-06-03 12:00:11 +0100313
 314 // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
 315 if (previous_dex_registers_.size() < current_dex_registers_.size()) {
 316 previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
 317 dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
 318 }
 319
 320 // Set bit in the mask for each register that has been changed since the previous stack map.
 321 // Modified registers are stored in the catalogue and the catalogue index added to the list.
David Srbecky71ec1cc2018-05-18 15:57:25 +0100322 for (size_t i = 0; i < current_dex_registers_.size(); i++) {
 323 DexRegisterLocation reg = current_dex_registers_[i];
David Srbecky6de88332018-06-03 12:00:11 +0100324 // Distance is difference between this index and the index of last modification.
 325 uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
  // Re-emit an unchanged register once |distance| exceeds the search bound, so
  // a reader never has to scan back more than kMaxDexRegisterMapSearchDistance
  // stack maps to find a register's location.
 326 if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
David Srbeckycf7833e2018-06-14 16:45:22 +0100327 BitTableBuilder<DexRegisterInfo>::Entry entry;
David Srbeckyf325e282018-06-13 15:02:32 +0100328 entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
 329 entry[DexRegisterInfo::kPackedValue] =
 330 DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
   // Dead registers get kNoValue instead of a catalogue entry.
David Srbecky6de88332018-06-03 12:00:11 +0100331 uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
David Srbecky71ec1cc2018-05-18 15:57:25 +0100332 temp_dex_register_mask_.SetBit(i);
David Srbeckyf325e282018-06-13 15:02:32 +0100333 temp_dex_register_map_.push_back({index});
David Srbecky6de88332018-06-03 12:00:11 +0100334 previous_dex_registers_[i] = reg;
 335 dex_register_timestamp_[i] = stack_maps_.size();
David Srbecky71ec1cc2018-05-18 15:57:25 +0100336 }
Calin Juravlec416d332015-04-23 16:01:43 +0100337 }
Calin Juravlec416d332015-04-23 16:01:43 +0100338
David Srbecky6de88332018-06-03 12:00:11 +0100339 // Set the mask and map for the current StackMap (which includes inlined registers).
David Srbecky71ec1cc2018-05-18 15:57:25 +0100340 if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
David Srbeckyf325e282018-06-13 15:02:32 +0100341 current_stack_map_[StackMap::kDexRegisterMaskIndex] =
David Srbecky6de88332018-06-03 12:00:11 +0100342 dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
 343 temp_dex_register_mask_.GetNumberOfBits());
Vladimir Marko225b6462015-09-28 12:17:40 +0100344 }
David Srbecky6de88332018-06-03 12:00:11 +0100345 if (!current_dex_registers_.empty()) {
David Srbeckyf325e282018-06-13 15:02:32 +0100346 current_stack_map_[StackMap::kDexRegisterMapIndex] =
David Srbecky6de88332018-06-03 12:00:11 +0100347 dex_register_maps_.Dedup(temp_dex_register_map_.data(),
 348 temp_dex_register_map_.size());
David Srbecky052f8ca2018-04-26 15:42:54 +0100349 }
David Srbecky049d6812018-05-18 14:46:49 +0100350
  // Deferred verification: decode the outer and inline dex register maps and
  // compare against a snapshot of the registers recorded for this stack map.
 351 if (kVerifyStackMaps) {
 352 size_t stack_map_index = stack_maps_.size();
David Srbecky049d6812018-05-18 14:46:49 +0100353 // We need to make copy of the current registers for later (when the check is run).
David Srbecky6de88332018-06-03 12:00:11 +0100354 auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
David Srbecky049d6812018-05-18 14:46:49 +0100355 current_dex_registers_.begin(), current_dex_registers_.end());
 356 dchecks_.emplace_back([=](const CodeInfo& code_info) {
 357 StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
David Srbecky6de88332018-06-03 12:00:11 +0100358 uint32_t expected_reg = 0;
 359 for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
 360 CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
David Srbecky049d6812018-05-18 14:46:49 +0100361 }
David Srbecky93bd3612018-07-02 19:30:18 +0100362 for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
 363 DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
 364 for (DexRegisterLocation reg : map) {
David Srbecky6de88332018-06-03 12:00:11 +0100365 CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
 366 }
 367 }
 368 CHECK_EQ(expected_reg, expected_dex_registers->size());
David Srbecky049d6812018-05-18 14:46:49 +0100369 });
 370 }
Calin Juravlec416d332015-04-23 16:01:43 +0100371}
372
// Serializes all collected data into the final byte stream: a fixed group of
// interleaved-varint headers (flags, code size, packed frame size, spill
// masks, register count, bit-table presence bits) followed by each non-empty
// bit table. Decodes the result back into a CodeInfo as a round-trip check and
// then runs every verification lambda queued by the Begin/End methods.
David Srbeckye7a91942018-08-01 17:23:53 +0100373ScopedArenaVector<uint8_t> StackMapStream::Encode() {
 374 DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
 375 DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
David Srbecky71ec1cc2018-05-18 15:57:25 +0100376
David Srbecky697c47a2019-06-16 21:53:07 +0100377 uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
Nicolas Geoffraya59af8a2019-11-27 17:42:32 +0000378 flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
Mythri Alled9e83772022-07-14 09:38:49 +0000379 flags |= debuggable_ ? CodeInfo::kIsDebuggable : 0;
David Srbecky17b4d2b2021-03-02 18:14:31 +0000380 DCHECK_LE(flags, kVarintMax); // Ensure flags can be read directly as byte.
  // One presence bit per table; empty tables are omitted from the stream.
David Srbecky697c47a2019-06-16 21:53:07 +0100381 uint32_t bit_table_flags = 0;
 382 ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
 383 if (bit_table->size() != 0) { // Record which bit-tables are stored.
 384 bit_table_flags |= 1 << i;
 385 }
 386 });
 387
David Srbeckye7a91942018-08-01 17:23:53 +0100388 ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
 389 BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
David Srbecky6c4ec5c2019-06-20 07:23:19 +0000390 out.WriteInterleavedVarints(std::array<uint32_t, CodeInfo::kNumHeaders>{
 391 flags,
David Srbecky17b4d2b2021-03-02 18:14:31 +0000392 code_size_,
David Srbecky6c4ec5c2019-06-20 07:23:19 +0000393 packed_frame_size_,
 394 core_spill_mask_,
 395 fp_spill_mask_,
 396 num_dex_registers_,
 397 bit_table_flags,
 398 });
David Srbecky697c47a2019-06-16 21:53:07 +0100399 ForEachBitTable([&out](size_t, auto bit_table) {
 400 if (bit_table->size() != 0) { // Skip empty bit-tables.
 401 bit_table->Encode(out);
 402 }
 403 });
David Srbecky45aa5982016-03-18 02:15:09 +0000404
David Srbeckya38e6cf2018-06-26 18:13:49 +0100405 // Verify that we can load the CodeInfo and check some essentials.
David Srbecky0d4567f2019-05-30 22:45:40 +0100406 size_t number_of_read_bits;
 407 CodeInfo code_info(buffer.data(), &number_of_read_bits);
  // The decoder must consume exactly what the writer produced.
 408 CHECK_EQ(number_of_read_bits, out.NumberOfWrittenBits());
David Srbeckya38e6cf2018-06-26 18:13:49 +0100409 CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
David Srbecky17b4d2b2021-03-02 18:14:31 +0000410 CHECK_EQ(CodeInfo::HasInlineInfo(buffer.data()), inline_infos_.size() > 0);
 411 CHECK_EQ(CodeInfo::IsBaseline(buffer.data()), baseline_);
Mathieu Chartier1a20b682017-01-31 14:25:16 -0800412
David Srbecky049d6812018-05-18 14:46:49 +0100413 // Verify all written data (usually only in debug builds).
 414 if (kVerifyStackMaps) {
David Srbecky049d6812018-05-18 14:46:49 +0100415 for (const auto& dcheck : dchecks_) {
 416 dcheck(code_info);
David Srbecky1bbdfd72016-02-24 16:39:26 +0000417 }
David Srbecky71ec1cc2018-05-18 15:57:25 +0100418 }
David Srbeckye7a91942018-08-01 17:23:53 +0100419
 420 return buffer;
David Srbecky1bbdfd72016-02-24 16:39:26 +0000421}
422
Calin Juravlec416d332015-04-23 16:01:43 +0100423} // namespace art