David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2016 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "block_builder.h" |
| 18 | |
#include "base/logging.h"  // For VLOG.
David Sehr | 312f3b2 | 2018-03-19 08:39:26 -0700 | [diff] [blame] | 20 | #include "dex/bytecode_utils.h" |
David Sehr | 9e734c7 | 2018-01-04 17:56:19 -0800 | [diff] [blame] | 21 | #include "dex/code_item_accessors-inl.h" |
| 22 | #include "dex/dex_file_exception_helpers.h" |
Mathieu Chartier | de4b08f | 2017-07-10 14:13:41 -0700 | [diff] [blame] | 23 | #include "quicken_info.h" |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 24 | |
Vladimir Marko | 0a51605 | 2019-10-14 13:00:44 +0000 | [diff] [blame] | 25 | namespace art { |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 26 | |
// Constructs a block builder for `graph` from the code item exposed by
// `accessor`. `branch_targets_` is sized to one slot per code unit — each
// slot holds the HBasicBlock starting at that dex_pc, or nullptr if none —
// or to a single slot (fake dex_pc 0) when there is no code item, which is
// the case for intrinsic graphs built via BuildIntrinsic().
// All transient data is arena-allocated: graph-lifetime objects come from
// the graph's allocator, builder-local ones from `local_allocator`.
HBasicBlockBuilder::HBasicBlockBuilder(HGraph* graph,
                                       const DexFile* const dex_file,
                                       const CodeItemDebugInfoAccessor& accessor,
                                       ScopedArenaAllocator* local_allocator)
    : allocator_(graph->GetAllocator()),
      graph_(graph),
      dex_file_(dex_file),
      code_item_accessor_(accessor),
      local_allocator_(local_allocator),
      branch_targets_(code_item_accessor_.HasCodeItem()
                          ? code_item_accessor_.InsnsSizeInCodeUnits()
                          : /* fake dex_pc=0 for intrinsic graph */ 1u,
                      nullptr,
                      local_allocator->Adapter(kArenaAllocGraphBuilder)),
      throwing_blocks_(kDefaultNumberOfThrowingBlocks,
                       local_allocator->Adapter(kArenaAllocGraphBuilder)),
      number_of_branches_(0u),
      quicken_index_for_dex_pc_(std::less<uint32_t>(),
                                local_allocator->Adapter(kArenaAllocGraphBuilder)) {}
| 46 | |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 47 | HBasicBlock* HBasicBlockBuilder::MaybeCreateBlockAt(uint32_t dex_pc) { |
| 48 | return MaybeCreateBlockAt(dex_pc, dex_pc); |
| 49 | } |
| 50 | |
| 51 | HBasicBlock* HBasicBlockBuilder::MaybeCreateBlockAt(uint32_t semantic_dex_pc, |
| 52 | uint32_t store_dex_pc) { |
| 53 | HBasicBlock* block = branch_targets_[store_dex_pc]; |
| 54 | if (block == nullptr) { |
Vladimir Marko | 69d310e | 2017-10-09 14:12:23 +0100 | [diff] [blame] | 55 | block = new (allocator_) HBasicBlock(graph_, semantic_dex_pc); |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 56 | branch_targets_[store_dex_pc] = block; |
| 57 | } |
| 58 | DCHECK_EQ(block->GetDexPc(), semantic_dex_pc); |
| 59 | return block; |
| 60 | } |
| 61 | |
// First pass over the code item: allocates an (as yet unconnected) HBasicBlock
// for every location where a block must start — the method entry, try-range
// boundaries, exception-handler addresses, branch/switch targets, and the
// fall-through successor of every block-ending instruction.
// Returns false (bail out of compilation) for malformed code: a TryItem or a
// fall-through that reaches beyond the end of the CodeItem.
bool HBasicBlockBuilder::CreateBranchTargets() {
  // Create the first block for the dex instructions, single successor of the entry block.
  MaybeCreateBlockAt(0u);

  if (code_item_accessor_.TriesSize() != 0) {
    // Create branch targets at the start/end of the TryItem range. These are
    // places where the program might fall through into/out of a block and
    // where TryBoundary instructions will be inserted later. Other edges which
    // enter/exit the try blocks are a result of branches/switches.
    for (const dex::TryItem& try_item : code_item_accessor_.TryItems()) {
      uint32_t dex_pc_start = try_item.start_addr_;
      uint32_t dex_pc_end = dex_pc_start + try_item.insn_count_;
      MaybeCreateBlockAt(dex_pc_start);
      if (dex_pc_end < code_item_accessor_.InsnsSizeInCodeUnits()) {
        // TODO: Do not create block if the last instruction cannot fall through.
        MaybeCreateBlockAt(dex_pc_end);
      } else if (dex_pc_end == code_item_accessor_.InsnsSizeInCodeUnits()) {
        // The TryItem spans until the very end of the CodeItem and therefore
        // cannot have any code afterwards.
      } else {
        // The TryItem spans beyond the end of the CodeItem. This is invalid code.
        VLOG(compiler) << "Not compiled: TryItem spans beyond the end of the CodeItem";
        return false;
      }
    }

    // Create branch targets for exception handlers. The handler data is a
    // ULEB128-encoded list count followed by the per-list handler entries.
    const uint8_t* handlers_ptr = code_item_accessor_.GetCatchHandlerData();
    uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
    for (uint32_t idx = 0; idx < handlers_size; ++idx) {
      CatchHandlerIterator iterator(handlers_ptr);
      for (; iterator.HasNext(); iterator.Next()) {
        MaybeCreateBlockAt(iterator.GetHandlerAddress());
      }
      handlers_ptr = iterator.EndDataPointer();
    }
  }

  // Iterate over all instructions and find branching instructions. Create blocks for
  // the locations these instructions branch to.
  for (const DexInstructionPcPair& pair : code_item_accessor_) {
    const uint32_t dex_pc = pair.DexPc();
    const Instruction& instruction = pair.Inst();

    if (instruction.IsBranch()) {
      number_of_branches_++;
      MaybeCreateBlockAt(dex_pc + instruction.GetTargetOffset());
    } else if (instruction.IsSwitch()) {
      number_of_branches_++;  // count as at least one branch (b/77652521)
      DexSwitchTable table(instruction, dex_pc);
      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
        MaybeCreateBlockAt(dex_pc + s_it.CurrentTargetOffset());

        // Create N-1 blocks where we will insert comparisons of the input value
        // against the Switch's case keys.
        if (table.ShouldBuildDecisionTree() && !s_it.IsLast()) {
          // Store the block under dex_pc of the current key at the switch data
          // instruction for uniqueness but give it the dex_pc of the SWITCH
          // instruction which it semantically belongs to.
          MaybeCreateBlockAt(dex_pc, s_it.GetDexPcForCurrentIndex());
        }
      }
    } else if (instruction.Opcode() == Instruction::MOVE_EXCEPTION) {
      // End the basic block after MOVE_EXCEPTION. This simplifies the later
      // stage of TryBoundary-block insertion.
    } else {
      // Not a block-ending instruction; nothing to do for it here.
      continue;
    }

    // Reaching this point means `instruction` ends its block. If it can also
    // fall through, the next instruction must start a new block.
    if (instruction.CanFlowThrough()) {
      DexInstructionIterator next(std::next(DexInstructionIterator(pair)));
      if (next == code_item_accessor_.end()) {
        // In the normal case we should never hit this but someone can artificially forge a dex
        // file to fall-through out the method code. In this case we bail out compilation.
        VLOG(compiler) << "Not compiled: Fall-through beyond the CodeItem";
        return false;
      }
      MaybeCreateBlockAt(next.DexPc());
    }
  }

  return true;
}
| 145 | |
// Second pass: walks the instructions in dex_pc order, adds the blocks created
// by CreateBranchTargets() to the graph in that order, and connects them with
// control-flow edges (fall-through, branch, switch, return/throw-to-exit).
// Also records the quicken index at every basic-block boundary and collects
// the blocks containing throwing instructions into `throwing_blocks_`.
void HBasicBlockBuilder::ConnectBasicBlocks() {
  HBasicBlock* block = graph_->GetEntryBlock();
  graph_->AddBlock(block);

  size_t quicken_index = 0;
  bool is_throwing_block = false;
  // Calculate the quickening index here instead of CreateBranchTargets since it's easier to
  // calculate in dex_pc order.
  for (const DexInstructionPcPair& pair : code_item_accessor_) {
    const uint32_t dex_pc = pair.DexPc();
    const Instruction& instruction = pair.Inst();

    // Check if this dex_pc address starts a new basic block.
    HBasicBlock* next_block = GetBlockAt(dex_pc);
    if (next_block != nullptr) {
      // We only need quicken index entries for basic block boundaries.
      quicken_index_for_dex_pc_.Put(dex_pc, quicken_index);
      if (block != nullptr) {
        // Last instruction did not end its basic block but a new one starts here.
        // It must have been a block falling through into the next one.
        block->AddSuccessor(next_block);
      }
      block = next_block;
      is_throwing_block = false;
      graph_->AddBlock(block);
    }
    // Make sure to increment this before the continues.
    if (QuickenInfoTable::NeedsIndexForInstruction(&instruction)) {
      ++quicken_index;
    }

    if (block == nullptr) {
      // Ignore dead code.
      continue;
    }

    // Record each block with at least one throwing instruction exactly once;
    // used later by InsertTryBoundaryBlocks() to decide which blocks need
    // exceptional edges.
    if (!is_throwing_block && IsThrowingDexInstruction(instruction)) {
      DCHECK(!ContainsElement(throwing_blocks_, block));
      is_throwing_block = true;
      throwing_blocks_.push_back(block);
    }

    if (instruction.IsBranch()) {
      uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset();
      block->AddSuccessor(GetBlockAt(target_dex_pc));
    } else if (instruction.IsReturn() || (instruction.Opcode() == Instruction::THROW)) {
      block->AddSuccessor(graph_->GetExitBlock());
    } else if (instruction.IsSwitch()) {
      DexSwitchTable table(instruction, dex_pc);
      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
        uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset();
        block->AddSuccessor(GetBlockAt(target_dex_pc));

        // Chain the synthetic decision blocks created in CreateBranchTargets():
        // each compares the switch input against one case key.
        if (table.ShouldBuildDecisionTree() && !s_it.IsLast()) {
          uint32_t next_case_dex_pc = s_it.GetDexPcForCurrentIndex();
          HBasicBlock* next_case_block = GetBlockAt(next_case_dex_pc);
          block->AddSuccessor(next_case_block);
          block = next_case_block;
          graph_->AddBlock(block);
        }
      }
    } else {
      // Remaining code only applies to instructions which end their basic block.
      continue;
    }

    // Go to the next instruction in case we read dex PC below.
    if (instruction.CanFlowThrough()) {
      block->AddSuccessor(GetBlockAt(std::next(DexInstructionIterator(pair)).DexPc()));
    }

    // The basic block ends here. Do not add any more instructions.
    block = nullptr;
  }

  graph_->AddBlock(graph_->GetExitBlock());
}
| 223 | |
| 224 | // Returns the TryItem stored for `block` or nullptr if there is no info for it. |
Andreas Gampe | 3f1dcd3 | 2018-12-28 09:39:56 -0800 | [diff] [blame] | 225 | static const dex::TryItem* GetTryItem( |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 226 | HBasicBlock* block, |
Andreas Gampe | 3f1dcd3 | 2018-12-28 09:39:56 -0800 | [diff] [blame] | 227 | const ScopedArenaSafeMap<uint32_t, const dex::TryItem*>& try_block_info) { |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 228 | auto iterator = try_block_info.find(block->GetBlockId()); |
| 229 | return (iterator == try_block_info.end()) ? nullptr : iterator->second; |
| 230 | } |
| 231 | |
| 232 | // Iterates over the exception handlers of `try_item`, finds the corresponding |
| 233 | // catch blocks and makes them successors of `try_boundary`. The order of |
| 234 | // successors matches the order in which runtime exception delivery searches |
| 235 | // for a handler. |
| 236 | static void LinkToCatchBlocks(HTryBoundary* try_boundary, |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 237 | const CodeItemDataAccessor& accessor, |
Andreas Gampe | 3f1dcd3 | 2018-12-28 09:39:56 -0800 | [diff] [blame] | 238 | const dex::TryItem* try_item, |
Vladimir Marko | 69d310e | 2017-10-09 14:12:23 +0100 | [diff] [blame] | 239 | const ScopedArenaSafeMap<uint32_t, HBasicBlock*>& catch_blocks) { |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 240 | for (CatchHandlerIterator it(accessor.GetCatchHandlerData(try_item->handler_off_)); |
| 241 | it.HasNext(); |
| 242 | it.Next()) { |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 243 | try_boundary->AddExceptionHandler(catch_blocks.Get(it.GetHandlerAddress())); |
| 244 | } |
| 245 | } |
| 246 | |
| 247 | bool HBasicBlockBuilder::MightHaveLiveNormalPredecessors(HBasicBlock* catch_block) { |
| 248 | if (kIsDebugBuild) { |
| 249 | DCHECK_NE(catch_block->GetDexPc(), kNoDexPc) << "Should not be called on synthetic blocks"; |
| 250 | DCHECK(!graph_->GetEntryBlock()->GetSuccessors().empty()) |
| 251 | << "Basic blocks must have been created and connected"; |
| 252 | for (HBasicBlock* predecessor : catch_block->GetPredecessors()) { |
| 253 | DCHECK(!predecessor->IsSingleTryBoundary()) |
| 254 | << "TryBoundary blocks must not have not been created yet"; |
| 255 | } |
| 256 | } |
| 257 | |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 258 | const Instruction& first = code_item_accessor_.InstructionAt(catch_block->GetDexPc()); |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 259 | if (first.Opcode() == Instruction::MOVE_EXCEPTION) { |
| 260 | // Verifier guarantees that if a catch block begins with MOVE_EXCEPTION then |
| 261 | // it has no live normal predecessors. |
| 262 | return false; |
| 263 | } else if (catch_block->GetPredecessors().empty()) { |
| 264 | // Normal control-flow edges have already been created. Since block's list of |
| 265 | // predecessors is empty, it cannot have any live or dead normal predecessors. |
| 266 | return false; |
| 267 | } |
| 268 | |
| 269 | // The catch block has normal predecessors but we do not know which are live |
| 270 | // and which will be removed during the initial DCE. Return `true` to signal |
| 271 | // that it may have live normal predecessors. |
| 272 | return true; |
| 273 | } |
| 274 | |
// Third pass: materializes try/catch structure in the CFG. Records which
// throwing blocks are covered by a TryItem, creates (or wraps with landing
// pads) the catch blocks, and inserts HTryBoundary entry/exit blocks on the
// edges that cross a try-region boundary, linking each boundary to its catch
// blocks. No-op for methods without try items.
void HBasicBlockBuilder::InsertTryBoundaryBlocks() {
  if (code_item_accessor_.TriesSize() == 0) {
    return;
  }

  // Keep a map of all try blocks and their respective TryItems. We do not use
  // the block's pointer but rather its id to ensure deterministic iteration.
  ScopedArenaSafeMap<uint32_t, const dex::TryItem*> try_block_info(
      std::less<uint32_t>(), local_allocator_->Adapter(kArenaAllocGraphBuilder));

  // Obtain TryItem information for blocks with throwing instructions, and split
  // blocks which are both try & catch to simplify the graph.
  for (HBasicBlock* block : graph_->GetBlocks()) {
    if (block->GetDexPc() == kNoDexPc) {
      // Synthetic block (e.g. entry/exit); not backed by dex code.
      continue;
    }

    // Do not bother creating exceptional edges for try blocks which have no
    // throwing instructions. In that case we simply assume that the block is
    // not covered by a TryItem. This prevents us from creating a throw-catch
    // loop for synchronized blocks.
    if (ContainsElement(throwing_blocks_, block)) {
      // Try to find a TryItem covering the block.
      const dex::TryItem* try_item = code_item_accessor_.FindTryItem(block->GetDexPc());
      if (try_item != nullptr) {
        // Block throwing and in a TryItem. Store the try block information.
        try_block_info.Put(block->GetBlockId(), try_item);
      }
    }
  }

  // Map from a handler dex_pc to the corresponding catch block.
  ScopedArenaSafeMap<uint32_t, HBasicBlock*> catch_blocks(
      std::less<uint32_t>(), local_allocator_->Adapter(kArenaAllocGraphBuilder));

  // Iterate over catch blocks, create artificial landing pads if necessary to
  // simplify the CFG, and set metadata.
  const uint8_t* handlers_ptr = code_item_accessor_.GetCatchHandlerData();
  uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
  for (uint32_t idx = 0; idx < handlers_size; ++idx) {
    CatchHandlerIterator iterator(handlers_ptr);
    for (; iterator.HasNext(); iterator.Next()) {
      uint32_t address = iterator.GetHandlerAddress();
      auto existing = catch_blocks.find(address);
      if (existing != catch_blocks.end()) {
        // Catch block already processed.
        TryCatchInformation* info = existing->second->GetTryCatchInformation();
        if (iterator.GetHandlerTypeIndex() != info->GetCatchTypeIndex()) {
          // The handler is for multiple types. We could record all the types, but
          // doing class resolution here isn't ideal, and it's unclear whether wasting
          // the space in TryCatchInformation is worth it.
          info->SetInvalidTypeIndex();
        }
        continue;
      }

      // Check if we should create an artificial landing pad for the catch block.
      // We create one if the catch block is also a try block because we do not
      // have a strategy for inserting TryBoundaries on exceptional edges.
      // We also create one if the block might have normal predecessors so as to
      // simplify register allocation.
      HBasicBlock* catch_block = GetBlockAt(address);
      bool is_try_block = (try_block_info.find(catch_block->GetBlockId()) != try_block_info.end());
      if (is_try_block || MightHaveLiveNormalPredecessors(catch_block)) {
        // The landing pad is an empty block that just jumps to the real handler.
        HBasicBlock* new_catch_block = new (allocator_) HBasicBlock(graph_, address);
        new_catch_block->AddInstruction(new (allocator_) HGoto(address));
        new_catch_block->AddSuccessor(catch_block);
        graph_->AddBlock(new_catch_block);
        catch_block = new_catch_block;
      }

      catch_blocks.Put(address, catch_block);
      catch_block->SetTryCatchInformation(
          new (allocator_) TryCatchInformation(iterator.GetHandlerTypeIndex(), *dex_file_));
    }
    handlers_ptr = iterator.EndDataPointer();
  }

  // Do a pass over the try blocks and insert entering TryBoundaries where at
  // least one predecessor is not covered by the same TryItem as the try block.
  // We do not split each edge separately, but rather create one boundary block
  // that all predecessors are relinked to. This preserves loop headers (b/23895756).
  for (const auto& entry : try_block_info) {
    uint32_t block_id = entry.first;
    const dex::TryItem* try_item = entry.second;
    HBasicBlock* try_block = graph_->GetBlocks()[block_id];
    for (HBasicBlock* predecessor : try_block->GetPredecessors()) {
      if (GetTryItem(predecessor, try_block_info) != try_item) {
        // Found a predecessor not covered by the same TryItem. Insert entering
        // boundary block.
        HTryBoundary* try_entry = new (allocator_) HTryBoundary(
            HTryBoundary::BoundaryKind::kEntry, try_block->GetDexPc());
        try_block->CreateImmediateDominator()->AddInstruction(try_entry);
        LinkToCatchBlocks(try_entry, code_item_accessor_, try_item, catch_blocks);
        // One boundary block serves all predecessors; stop scanning.
        break;
      }
    }
  }

  // Do a second pass over the try blocks and insert exit TryBoundaries where
  // the successor is not in the same TryItem.
  for (const auto& entry : try_block_info) {
    uint32_t block_id = entry.first;
    const dex::TryItem* try_item = entry.second;
    HBasicBlock* try_block = graph_->GetBlocks()[block_id];
    // NOTE: Do not use iterators because SplitEdge would invalidate them.
    for (size_t i = 0, e = try_block->GetSuccessors().size(); i < e; ++i) {
      HBasicBlock* successor = try_block->GetSuccessors()[i];

      // If the successor is a try block, all of its predecessors must be
      // covered by the same TryItem. Otherwise the previous pass would have
      // created a non-throwing boundary block.
      if (GetTryItem(successor, try_block_info) != nullptr) {
        DCHECK_EQ(try_item, GetTryItem(successor, try_block_info));
        continue;
      }

      // Insert TryBoundary and link to catch blocks.
      HTryBoundary* try_exit =
          new (allocator_) HTryBoundary(HTryBoundary::BoundaryKind::kExit, successor->GetDexPc());
      graph_->SplitEdge(try_block, successor)->AddInstruction(try_exit);
      LinkToCatchBlocks(try_exit, code_item_accessor_, try_item, catch_blocks);
    }
  }
}
| 400 | |
// For OSR compilation only (see Build()): inserts a synthesized predecessor
// block with a never-taken self-loop in front of every block that is the
// target of a backward branch or backward switch edge.
void HBasicBlockBuilder::InsertSynthesizedLoopsForOsr() {
  ArenaSet<uint32_t> targets(allocator_->Adapter(kArenaAllocGraphBuilder));
  // Collect basic blocks that are targets of a negative branch.
  for (const DexInstructionPcPair& pair : code_item_accessor_) {
    const uint32_t dex_pc = pair.DexPc();
    const Instruction& instruction = pair.Inst();
    if (instruction.IsBranch()) {
      uint32_t target_dex_pc = dex_pc + instruction.GetTargetOffset();
      if (target_dex_pc < dex_pc) {  // Backward edge.
        HBasicBlock* block = GetBlockAt(target_dex_pc);
        CHECK_NE(kNoDexPc, block->GetDexPc());
        targets.insert(block->GetBlockId());
      }
    } else if (instruction.IsSwitch()) {
      DexSwitchTable table(instruction, dex_pc);
      for (DexSwitchTableIterator s_it(table); !s_it.Done(); s_it.Advance()) {
        uint32_t target_dex_pc = dex_pc + s_it.CurrentTargetOffset();
        if (target_dex_pc < dex_pc) {  // Backward edge.
          HBasicBlock* block = GetBlockAt(target_dex_pc);
          CHECK_NE(kNoDexPc, block->GetDexPc());
          targets.insert(block->GetBlockId());
        }
      }
    }
  }

  // Insert synthesized loops before the collected blocks.
  for (uint32_t block_id : targets) {
    HBasicBlock* block = graph_->GetBlocks()[block_id];
    HBasicBlock* loop_block = new (allocator_) HBasicBlock(graph_, block->GetDexPc());
    graph_->AddBlock(loop_block);
    // Redirect every former predecessor of `block` to the new loop block.
    while (!block->GetPredecessors().empty()) {
      block->GetPredecessors()[0]->ReplaceSuccessor(block, loop_block);
    }
    // The loop block has itself and `block` as successors. NOTE(review): the
    // successor order matters for HIf — presumably the first successor is the
    // taken (true) branch; confirm against HIf's contract before reordering.
    loop_block->AddSuccessor(loop_block);
    loop_block->AddSuccessor(block);
    // We loop on false - we know this won't be optimized later on as the loop
    // is marked irreducible, which disables loop optimizations.
    loop_block->AddInstruction(new (allocator_) HIf(graph_->GetIntConstant(0), kNoDexPc));
  }
}
| 442 | |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 443 | bool HBasicBlockBuilder::Build() { |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 444 | DCHECK(code_item_accessor_.HasCodeItem()); |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 445 | DCHECK(graph_->GetBlocks().empty()); |
| 446 | |
Vladimir Marko | 69d310e | 2017-10-09 14:12:23 +0100 | [diff] [blame] | 447 | graph_->SetEntryBlock(new (allocator_) HBasicBlock(graph_, kNoDexPc)); |
| 448 | graph_->SetExitBlock(new (allocator_) HBasicBlock(graph_, kNoDexPc)); |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 449 | |
| 450 | // TODO(dbrazdil): Do CreateBranchTargets and ConnectBasicBlocks in one pass. |
| 451 | if (!CreateBranchTargets()) { |
| 452 | return false; |
| 453 | } |
| 454 | |
| 455 | ConnectBasicBlocks(); |
| 456 | InsertTryBoundaryBlocks(); |
| 457 | |
Vladimir Marko | aedc9bc | 2019-07-04 15:31:42 +0100 | [diff] [blame] | 458 | if (graph_->IsCompilingOsr()) { |
| 459 | InsertSynthesizedLoopsForOsr(); |
| 460 | } |
| 461 | |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 462 | return true; |
| 463 | } |
| 464 | |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 465 | void HBasicBlockBuilder::BuildIntrinsic() { |
Mathieu Chartier | 808c7a5 | 2017-12-15 11:19:33 -0800 | [diff] [blame] | 466 | DCHECK(!code_item_accessor_.HasCodeItem()); |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 467 | DCHECK(graph_->GetBlocks().empty()); |
| 468 | |
| 469 | // Create blocks. |
| 470 | HBasicBlock* entry_block = new (allocator_) HBasicBlock(graph_, kNoDexPc); |
| 471 | HBasicBlock* exit_block = new (allocator_) HBasicBlock(graph_, kNoDexPc); |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 472 | HBasicBlock* body = MaybeCreateBlockAt(/* semantic_dex_pc= */ kNoDexPc, /* store_dex_pc= */ 0u); |
Vladimir Marko | 92f7f3c | 2017-10-31 11:38:30 +0000 | [diff] [blame] | 473 | |
| 474 | // Add blocks to the graph. |
| 475 | graph_->AddBlock(entry_block); |
| 476 | graph_->AddBlock(body); |
| 477 | graph_->AddBlock(exit_block); |
| 478 | graph_->SetEntryBlock(entry_block); |
| 479 | graph_->SetExitBlock(exit_block); |
| 480 | |
| 481 | // Connect blocks. |
| 482 | entry_block->AddSuccessor(body); |
| 483 | body->AddSuccessor(exit_block); |
| 484 | } |
| 485 | |
Mathieu Chartier | de4b08f | 2017-07-10 14:13:41 -0700 | [diff] [blame] | 486 | size_t HBasicBlockBuilder::GetQuickenIndex(uint32_t dex_pc) const { |
| 487 | return quicken_index_for_dex_pc_.Get(dex_pc); |
| 488 | } |
| 489 | |
David Brazdil | 86ea7ee | 2016-02-16 09:26:07 +0000 | [diff] [blame] | 490 | } // namespace art |