Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2009 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Mathieu Chartier | c56057e | 2014-05-04 13:18:58 -0700 | [diff] [blame] | 17 | #include "indirect_reference_table-inl.h" |
| 18 | |
Mathieu Chartier | 8778c52 | 2016-10-04 19:06:30 -0700 | [diff] [blame] | 19 | #include "base/dumpable-inl.h" |
Mathieu Chartier | dabdc0f | 2016-03-04 14:58:03 -0800 | [diff] [blame] | 20 | #include "base/systrace.h" |
Elliott Hughes | a250199 | 2011-08-26 19:39:54 -0700 | [diff] [blame] | 21 | #include "jni_internal.h" |
Mathieu Chartier | ff6d8cf | 2015-06-02 13:40:12 -0700 | [diff] [blame] | 22 | #include "nth_caller_visitor.h" |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 23 | #include "reference_table.h" |
Elliott Hughes | a250199 | 2011-08-26 19:39:54 -0700 | [diff] [blame] | 24 | #include "runtime.h" |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 25 | #include "scoped_thread_state_change-inl.h" |
Ian Rogers | 5a7a74a | 2011-09-26 16:32:29 -0700 | [diff] [blame] | 26 | #include "thread.h" |
Ian Rogers | cdd1d2d | 2011-08-18 09:58:17 -0700 | [diff] [blame] | 27 | #include "utils.h" |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 28 | |
| 29 | #include <cstdlib> |
| 30 | |
| 31 | namespace art { |
| 32 | |
// When true, Remove() dumps the calling thread's stack upon receiving a
// non-JNI (handle-scope) reference, to help find the misbehaving caller.
static constexpr bool kDumpStackOnNonLocalReference = false;
// When true, Add/Remove/segment-state changes log verbose tracing output.
static constexpr bool kDebugIRT = false;
Mathieu Chartier | 2ada67b | 2015-07-30 11:41:04 -0700 | [diff] [blame] | 35 | |
Andreas Gampe | f1e8630 | 2016-10-03 11:42:31 -0700 | [diff] [blame] | 36 | const char* GetIndirectRefKindString(const IndirectRefKind& kind) { |
| 37 | switch (kind) { |
| 38 | case kHandleScopeOrInvalid: |
| 39 | return "HandleScopeOrInvalid"; |
| 40 | case kLocal: |
| 41 | return "Local"; |
| 42 | case kGlobal: |
| 43 | return "Global"; |
| 44 | case kWeakGlobal: |
| 45 | return "WeakGlobal"; |
| 46 | } |
| 47 | return "IndirectRefKind Error"; |
| 48 | } |
| 49 | |
Andreas Gampe | f1e8630 | 2016-10-03 11:42:31 -0700 | [diff] [blame] | 50 | void IndirectReferenceTable::AbortIfNoCheckJNI(const std::string& msg) { |
Elliott Hughes | a250199 | 2011-08-26 19:39:54 -0700 | [diff] [blame] | 51 | // If -Xcheck:jni is on, it'll give a more detailed error before aborting. |
Ian Rogers | 68d8b42 | 2014-07-17 11:09:10 -0700 | [diff] [blame] | 52 | JavaVMExt* vm = Runtime::Current()->GetJavaVM(); |
| 53 | if (!vm->IsCheckJniEnabled()) { |
Elliott Hughes | a250199 | 2011-08-26 19:39:54 -0700 | [diff] [blame] | 54 | // Otherwise, we want to abort rather than hand back a bad reference. |
Andreas Gampe | f1e8630 | 2016-10-03 11:42:31 -0700 | [diff] [blame] | 55 | LOG(FATAL) << msg; |
| 56 | } else { |
| 57 | LOG(ERROR) << msg; |
Elliott Hughes | a250199 | 2011-08-26 19:39:54 -0700 | [diff] [blame] | 58 | } |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 59 | } |
| 60 | |
Andreas Gampe | a8e3b86 | 2016-10-17 20:12:52 -0700 | [diff] [blame] | 61 | IndirectReferenceTable::IndirectReferenceTable(size_t max_count, |
| 62 | IndirectRefKind desired_kind, |
Andreas Gampe | 9d7ef62 | 2016-10-24 19:35:19 -0700 | [diff] [blame] | 63 | ResizableCapacity resizable, |
Richard Uhler | da0a69e | 2016-10-11 15:06:38 +0100 | [diff] [blame] | 64 | std::string* error_msg) |
Andreas Gampe | e03662b | 2016-10-13 17:12:56 -0700 | [diff] [blame] | 65 | : segment_state_(kIRTFirstSegment), |
| 66 | kind_(desired_kind), |
| 67 | max_entries_(max_count), |
Andreas Gampe | 9d7ef62 | 2016-10-24 19:35:19 -0700 | [diff] [blame] | 68 | current_num_holes_(0), |
| 69 | resizable_(resizable) { |
Richard Uhler | da0a69e | 2016-10-11 15:06:38 +0100 | [diff] [blame] | 70 | CHECK(error_msg != nullptr); |
Andreas Gampe | a8e3b86 | 2016-10-17 20:12:52 -0700 | [diff] [blame] | 71 | CHECK_NE(desired_kind, kHandleScopeOrInvalid); |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 72 | |
Andreas Gampe | a8e3b86 | 2016-10-17 20:12:52 -0700 | [diff] [blame] | 73 | const size_t table_bytes = max_count * sizeof(IrtEntry); |
Mathieu Chartier | c56057e | 2014-05-04 13:18:58 -0700 | [diff] [blame] | 74 | table_mem_map_.reset(MemMap::MapAnonymous("indirect ref table", nullptr, table_bytes, |
Richard Uhler | da0a69e | 2016-10-11 15:06:38 +0100 | [diff] [blame] | 75 | PROT_READ | PROT_WRITE, false, false, error_msg)); |
| 76 | if (table_mem_map_.get() == nullptr && error_msg->empty()) { |
| 77 | *error_msg = "Unable to map memory for indirect ref table"; |
Andreas Gampe | 3f5881f | 2015-04-08 10:26:16 -0700 | [diff] [blame] | 78 | } |
Richard Uhler | da0a69e | 2016-10-11 15:06:38 +0100 | [diff] [blame] | 79 | |
| 80 | if (table_mem_map_.get() != nullptr) { |
| 81 | table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin()); |
| 82 | } else { |
| 83 | table_ = nullptr; |
| 84 | } |
Andreas Gampe | e03662b | 2016-10-13 17:12:56 -0700 | [diff] [blame] | 85 | segment_state_ = kIRTFirstSegment; |
Andreas Gampe | 94a5202 | 2016-10-25 12:01:48 -0700 | [diff] [blame] | 86 | last_known_previous_state_ = kIRTFirstSegment; |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 87 | } |
| 88 | |
| 89 | IndirectReferenceTable::~IndirectReferenceTable() { |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 90 | } |
| 91 | |
Andreas Gampe | dc061d0 | 2016-10-24 13:19:37 -0700 | [diff] [blame] | 92 | void IndirectReferenceTable::ConstexprChecks() { |
| 93 | // Use this for some assertions. They can't be put into the header as C++ wants the class |
| 94 | // to be complete. |
| 95 | |
| 96 | // Check kind. |
| 97 | static_assert((EncodeIndirectRefKind(kLocal) & (~kKindMask)) == 0, "Kind encoding error"); |
| 98 | static_assert((EncodeIndirectRefKind(kGlobal) & (~kKindMask)) == 0, "Kind encoding error"); |
| 99 | static_assert((EncodeIndirectRefKind(kWeakGlobal) & (~kKindMask)) == 0, "Kind encoding error"); |
| 100 | static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kLocal)) == kLocal, |
| 101 | "Kind encoding error"); |
| 102 | static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kGlobal)) == kGlobal, |
| 103 | "Kind encoding error"); |
| 104 | static_assert(DecodeIndirectRefKind(EncodeIndirectRefKind(kWeakGlobal)) == kWeakGlobal, |
| 105 | "Kind encoding error"); |
| 106 | |
| 107 | // Check serial. |
| 108 | static_assert(DecodeSerial(EncodeSerial(0u)) == 0u, "Serial encoding error"); |
| 109 | static_assert(DecodeSerial(EncodeSerial(1u)) == 1u, "Serial encoding error"); |
| 110 | static_assert(DecodeSerial(EncodeSerial(2u)) == 2u, "Serial encoding error"); |
| 111 | static_assert(DecodeSerial(EncodeSerial(3u)) == 3u, "Serial encoding error"); |
| 112 | |
| 113 | // Table index. |
| 114 | static_assert(DecodeIndex(EncodeIndex(0u)) == 0u, "Index encoding error"); |
| 115 | static_assert(DecodeIndex(EncodeIndex(1u)) == 1u, "Index encoding error"); |
| 116 | static_assert(DecodeIndex(EncodeIndex(2u)) == 2u, "Index encoding error"); |
| 117 | static_assert(DecodeIndex(EncodeIndex(3u)) == 3u, "Index encoding error"); |
| 118 | } |
| 119 | |
Andreas Gampe | 3f5881f | 2015-04-08 10:26:16 -0700 | [diff] [blame] | 120 | bool IndirectReferenceTable::IsValid() const { |
| 121 | return table_mem_map_.get() != nullptr; |
| 122 | } |
| 123 | |
Andreas Gampe | e03662b | 2016-10-13 17:12:56 -0700 | [diff] [blame] | 124 | // Holes: |
| 125 | // |
| 126 | // To keep the IRT compact, we want to fill "holes" created by non-stack-discipline Add & Remove |
| 127 | // operation sequences. For simplicity and lower memory overhead, we do not use a free list or |
| 128 | // similar. Instead, we scan for holes, with the expectation that we will find holes fast as they |
| 129 | // are usually near the end of the table (see the header, TODO: verify this assumption). To avoid |
| 130 | // scans when there are no holes, the number of known holes should be tracked. |
| 131 | // |
| 132 | // A previous implementation stored the top index and the number of holes as the segment state. |
// This constrains the maximum number of references to 16 bits. We want to relax this, as it
| 134 | // is easy to require more references (e.g., to list all classes in large applications). Thus, |
| 135 | // the implicitly stack-stored state, the IRTSegmentState, is only the top index. |
| 136 | // |
| 137 | // Thus, hole count is a local property of the current segment, and needs to be recovered when |
| 138 | // (or after) a frame is pushed or popped. To keep JNI transitions simple (and inlineable), we |
| 139 | // cannot do work when the segment changes. Thus, Add and Remove need to ensure the current |
| 140 | // hole count is correct. |
| 141 | // |
| 142 | // To be able to detect segment changes, we require an additional local field that can describe |
| 143 | // the known segment. This is last_known_previous_state_. The requirement will become clear with |
| 144 | // the following (some non-trivial) cases that have to be supported: |
| 145 | // |
| 146 | // 1) Segment with holes (current_num_holes_ > 0), push new segment, add/remove reference |
| 147 | // 2) Segment with holes (current_num_holes_ > 0), pop segment, add/remove reference |
| 148 | // 3) Segment with holes (current_num_holes_ > 0), push new segment, pop segment, add/remove |
| 149 | // reference |
| 150 | // 4) Empty segment, push new segment, create a hole, pop a segment, add/remove a reference |
| 151 | // 5) Base segment, push new segment, create a hole, pop a segment, push new segment, add/remove |
| 152 | // reference |
| 153 | // |
| 154 | // Storing the last known *previous* state (bottom index) allows conservatively detecting all the |
| 155 | // segment changes above. The condition is simply that the last known state is greater than or |
| 156 | // equal to the current previous state, and smaller than the current state (top index). The |
| 157 | // condition is conservative as it adds O(1) overhead to operations on an empty segment. |
| 158 | |
| 159 | static size_t CountNullEntries(const IrtEntry* table, size_t from, size_t to) { |
| 160 | size_t count = 0; |
| 161 | for (size_t index = from; index != to; ++index) { |
| 162 | if (table[index].GetReference()->IsNull()) { |
| 163 | count++; |
| 164 | } |
| 165 | } |
| 166 | return count; |
| 167 | } |
| 168 | |
| 169 | void IndirectReferenceTable::RecoverHoles(IRTSegmentState prev_state) { |
| 170 | if (last_known_previous_state_.top_index >= segment_state_.top_index || |
| 171 | last_known_previous_state_.top_index < prev_state.top_index) { |
| 172 | const size_t top_index = segment_state_.top_index; |
| 173 | size_t count = CountNullEntries(table_, prev_state.top_index, top_index); |
| 174 | |
| 175 | if (kDebugIRT) { |
| 176 | LOG(INFO) << "+++ Recovered holes: " |
| 177 | << " Current prev=" << prev_state.top_index |
| 178 | << " Current top_index=" << top_index |
| 179 | << " Old num_holes=" << current_num_holes_ |
| 180 | << " New num_holes=" << count; |
| 181 | } |
| 182 | |
| 183 | current_num_holes_ = count; |
| 184 | last_known_previous_state_ = prev_state; |
| 185 | } else if (kDebugIRT) { |
| 186 | LOG(INFO) << "No need to recover holes"; |
| 187 | } |
| 188 | } |
| 189 | |
| 190 | ALWAYS_INLINE |
| 191 | static inline void CheckHoleCount(IrtEntry* table, |
| 192 | size_t exp_num_holes, |
| 193 | IRTSegmentState prev_state, |
| 194 | IRTSegmentState cur_state) { |
| 195 | if (kIsDebugBuild) { |
| 196 | size_t count = CountNullEntries(table, prev_state.top_index, cur_state.top_index); |
| 197 | CHECK_EQ(exp_num_holes, count) << "prevState=" << prev_state.top_index |
| 198 | << " topIndex=" << cur_state.top_index; |
| 199 | } |
| 200 | } |
| 201 | |
| 202 | bool IndirectReferenceTable::Resize(size_t new_size, std::string* error_msg) { |
| 203 | CHECK_GT(new_size, max_entries_); |
| 204 | |
| 205 | const size_t table_bytes = new_size * sizeof(IrtEntry); |
| 206 | std::unique_ptr<MemMap> new_map(MemMap::MapAnonymous("indirect ref table", |
| 207 | nullptr, |
| 208 | table_bytes, |
| 209 | PROT_READ | PROT_WRITE, |
| 210 | false, |
| 211 | false, |
| 212 | error_msg)); |
| 213 | if (new_map == nullptr) { |
| 214 | return false; |
| 215 | } |
| 216 | |
| 217 | memcpy(new_map->Begin(), table_mem_map_->Begin(), table_mem_map_->Size()); |
| 218 | table_mem_map_ = std::move(new_map); |
| 219 | table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin()); |
| 220 | max_entries_ = new_size; |
| 221 | |
| 222 | return true; |
| 223 | } |
| 224 | |
// Adds a reference to the table: fills the first hole in the current segment
// if one exists, otherwise appends at the top. Aborts (LOG(FATAL)) on
// overflow of a non-resizable table, or if resizing fails. Returns the
// encoded IndirectRef for the new entry.
IndirectRef IndirectReferenceTable::Add(IRTSegmentState previous_state,
                                        ObjPtr<mirror::Object> obj) {
  if (kDebugIRT) {
    LOG(INFO) << "+++ Add: previous_state=" << previous_state.top_index
              << " top_index=" << segment_state_.top_index
              << " last_known_prev_top_index=" << last_known_previous_state_.top_index
              << " holes=" << current_num_holes_;
  }

  size_t top_index = segment_state_.top_index;

  CHECK(obj != nullptr);
  VerifyObject(obj);
  DCHECK(table_ != nullptr);

  // Out of space?
  if (top_index == max_entries_) {
    if (resizable_ == ResizableCapacity::kNo) {
      // Fixed-capacity table: overflow is a hard failure (app bug).
      LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
                 << "(max=" << max_entries_ << ")\n"
                 << MutatorLockedDumpable<IndirectReferenceTable>(*this);
      UNREACHABLE();
    }

    // Try to double space.
    std::string error_msg;
    if (!Resize(max_entries_ * 2, &error_msg)) {
      LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
                 << "(max=" << max_entries_ << ")" << std::endl
                 << MutatorLockedDumpable<IndirectReferenceTable>(*this)
                 << " Resizing failed: " << error_msg;
      UNREACHABLE();
    }
  }

  // Make sure current_num_holes_ is valid for this segment: a JNI frame
  // push/pop may have happened since the last Add/Remove.
  RecoverHoles(previous_state);
  CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);

  // We know there's enough room in the table. Now we just need to find
  // the right spot. If there's a hole, find it and fill it; otherwise,
  // add to the end of the list.
  IndirectRef result;
  size_t index;
  if (current_num_holes_ > 0) {
    DCHECK_GT(top_index, 1U);
    // Find the first hole; likely to be near the end of the list.
    // The top entry itself cannot be a hole (holes at the top are consumed
    // by Remove), so start scanning just below it.
    IrtEntry* p_scan = &table_[top_index - 1];
    DCHECK(!p_scan->GetReference()->IsNull());
    --p_scan;
    while (!p_scan->GetReference()->IsNull()) {
      // Must find a hole before reaching the segment's bottom, since
      // current_num_holes_ > 0.
      DCHECK_GE(p_scan, table_ + previous_state.top_index);
      --p_scan;
    }
    index = p_scan - table_;
    current_num_holes_--;
  } else {
    // Add to the end.
    index = top_index++;
    segment_state_.top_index = top_index;
  }
  // IrtEntry::Add presumably stores the object and bumps the slot's serial
  // number -- confirm against the header.
  table_[index].Add(obj);
  result = ToIndirectRef(index);
  if (kDebugIRT) {
    LOG(INFO) << "+++ added at " << ExtractIndex(result) << " top=" << segment_state_.top_index
              << " holes=" << current_num_holes_;
  }

  DCHECK(result != nullptr);
  return result;
}
| 294 | |
Elliott Hughes | 726079d | 2011-10-07 18:43:44 -0700 | [diff] [blame] | 295 | void IndirectReferenceTable::AssertEmpty() { |
Hiroshi Yamauchi | 8a74117 | 2014-09-08 13:22:56 -0700 | [diff] [blame] | 296 | for (size_t i = 0; i < Capacity(); ++i) { |
Mathieu Chartier | 4838d66 | 2014-09-25 15:27:43 -0700 | [diff] [blame] | 297 | if (!table_[i].GetReference()->IsNull()) { |
Hiroshi Yamauchi | 8a74117 | 2014-09-08 13:22:56 -0700 | [diff] [blame] | 298 | LOG(FATAL) << "Internal Error: non-empty local reference table\n" |
| 299 | << MutatorLockedDumpable<IndirectReferenceTable>(*this); |
Mathieu Chartier | 8778c52 | 2016-10-04 19:06:30 -0700 | [diff] [blame] | 300 | UNREACHABLE(); |
Hiroshi Yamauchi | 8a74117 | 2014-09-08 13:22:56 -0700 | [diff] [blame] | 301 | } |
Elliott Hughes | 726079d | 2011-10-07 18:43:44 -0700 | [diff] [blame] | 302 | } |
| 303 | } |
| 304 | |
// Removes an object. We extract the table offset bits from "iref"
// and zap the corresponding entry, leaving a hole if it's not at the top.
// If the entry is not between the current top index and the bottom index
// specified by the cookie, we don't remove anything. This is the behavior
// required by JNI's DeleteLocalRef function.
// This method is not called when a local frame is popped; this is only used
// for explicit single removals.
// Returns "false" if nothing was removed.
bool IndirectReferenceTable::Remove(IRTSegmentState previous_state, IndirectRef iref) {
  if (kDebugIRT) {
    LOG(INFO) << "+++ Remove: previous_state=" << previous_state.top_index
              << " top_index=" << segment_state_.top_index
              << " last_known_prev_top_index=" << last_known_previous_state_.top_index
              << " holes=" << current_num_holes_;
  }

  // The current segment spans [bottom_index, top_index).
  const uint32_t top_index = segment_state_.top_index;
  const uint32_t bottom_index = previous_state.top_index;

  DCHECK(table_ != nullptr);

  // Tolerate stack (handle-scope) references being passed instead of IRT
  // references: warn (optionally with a stack dump) and report success
  // without touching the table.
  if (GetIndirectRefKind(iref) == kHandleScopeOrInvalid) {
    auto* self = Thread::Current();
    if (self->HandleScopeContains(reinterpret_cast<jobject>(iref))) {
      auto* env = self->GetJniEnv();
      DCHECK(env != nullptr);
      if (env->check_jni) {
        ScopedObjectAccess soa(self);
        LOG(WARNING) << "Attempt to remove non-JNI local reference, dumping thread";
        if (kDumpStackOnNonLocalReference) {
          self->Dump(LOG_STREAM(WARNING));
        }
      }
      return true;
    }
  }
  const uint32_t idx = ExtractIndex(iref);
  if (idx < bottom_index) {
    // Wrong segment.
    LOG(WARNING) << "Attempt to remove index outside index area (" << idx
                 << " vs " << bottom_index << "-" << top_index << ")";
    return false;
  }
  if (idx >= top_index) {
    // Bad --- stale reference?
    LOG(WARNING) << "Attempt to remove invalid index " << idx
                 << " (bottom=" << bottom_index << " top=" << top_index << ")";
    return false;
  }

  // Make sure current_num_holes_ is valid for this segment before we use it.
  RecoverHoles(previous_state);
  CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);

  if (idx == top_index - 1) {
    // Top-most entry. Scan up and consume holes.

    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    if (current_num_holes_ != 0) {
      // Shrink the top index past any holes that are now exposed at the top,
      // so the segment stays compact at its upper end.
      uint32_t collapse_top_index = top_index;
      while (--collapse_top_index > bottom_index && current_num_holes_ != 0) {
        if (kDebugIRT) {
          ScopedObjectAccess soa(Thread::Current());
          LOG(INFO) << "+++ checking for hole at " << collapse_top_index - 1
                    << " (previous_state=" << bottom_index << ") val="
                    << table_[collapse_top_index - 1].GetReference()->Read<kWithoutReadBarrier>();
        }
        if (!table_[collapse_top_index - 1].GetReference()->IsNull()) {
          break;
        }
        if (kDebugIRT) {
          LOG(INFO) << "+++ ate hole at " << (collapse_top_index - 1);
        }
        current_num_holes_--;
      }
      segment_state_.top_index = collapse_top_index;

      CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);
    } else {
      // No holes: just drop the top index by one.
      segment_state_.top_index = top_index - 1;
      if (kDebugIRT) {
        LOG(INFO) << "+++ ate last entry " << top_index - 1;
      }
    }
  } else {
    // Not the top-most entry. This creates a hole. We null out the entry to prevent somebody
    // from deleting it twice and screwing up the hole count.
    if (table_[idx].GetReference()->IsNull()) {
      LOG(INFO) << "--- WEIRD: removing null entry " << idx;
      return false;
    }
    if (!CheckEntry("remove", iref, idx)) {
      return false;
    }

    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
    current_num_holes_++;
    CheckHoleCount(table_, current_num_holes_, previous_state, segment_state_);
    if (kDebugIRT) {
      LOG(INFO) << "+++ left hole at " << idx << ", holes=" << current_num_holes_;
    }
  }

  return true;
}
| 413 | |
Mathieu Chartier | 91c2f0c | 2014-11-26 11:21:15 -0800 | [diff] [blame] | 414 | void IndirectReferenceTable::Trim() { |
Mathieu Chartier | dabdc0f | 2016-03-04 14:58:03 -0800 | [diff] [blame] | 415 | ScopedTrace trace(__PRETTY_FUNCTION__); |
Mathieu Chartier | 91c2f0c | 2014-11-26 11:21:15 -0800 | [diff] [blame] | 416 | const size_t top_index = Capacity(); |
| 417 | auto* release_start = AlignUp(reinterpret_cast<uint8_t*>(&table_[top_index]), kPageSize); |
| 418 | uint8_t* release_end = table_mem_map_->End(); |
| 419 | madvise(release_start, release_end - release_start, MADV_DONTNEED); |
| 420 | } |
| 421 | |
Mathieu Chartier | bb87e0f | 2015-04-03 11:21:55 -0700 | [diff] [blame] | 422 | void IndirectReferenceTable::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) { |
Mathieu Chartier | 4809d0a | 2015-04-07 10:39:04 -0700 | [diff] [blame] | 423 | BufferedRootVisitor<kDefaultBufferedRootCount> root_visitor(visitor, root_info); |
Mathieu Chartier | 02e2511 | 2013-08-14 16:14:24 -0700 | [diff] [blame] | 424 | for (auto ref : *this) { |
Mathieu Chartier | 9086b65 | 2015-04-14 09:35:18 -0700 | [diff] [blame] | 425 | if (!ref->IsNull()) { |
| 426 | root_visitor.VisitRoot(*ref); |
| 427 | DCHECK(!ref->IsNull()); |
| 428 | } |
Elliott Hughes | 410c0c8 | 2011-09-01 17:58:25 -0700 | [diff] [blame] | 429 | } |
| 430 | } |
| 431 | |
// Dumps all live entries of the table to the given stream via
// ReferenceTable::Dump.
void IndirectReferenceTable::Dump(std::ostream& os) const {
  os << kind_ << " table dump:\n";
  ReferenceTable::Table entries;
  for (size_t i = 0; i < Capacity(); ++i) {
    // First peek at the slot without a read barrier -- presumably to avoid
    // the barrier cost on empty (null) slots; confirm against GC docs.
    ObjPtr<mirror::Object> obj = table_[i].GetReference()->Read<kWithoutReadBarrier>();
    if (obj != nullptr) {
      // Re-read with the read barrier so the recorded pointer is the
      // canonical (possibly forwarded) object.
      obj = table_[i].GetReference()->Read();
      entries.push_back(GcRoot<mirror::Object>(obj));
    }
  }
  ReferenceTable::Dump(os, entries);
}
| 444 | |
Andreas Gampe | e03662b | 2016-10-13 17:12:56 -0700 | [diff] [blame] | 445 | void IndirectReferenceTable::SetSegmentState(IRTSegmentState new_state) { |
| 446 | if (kDebugIRT) { |
| 447 | LOG(INFO) << "Setting segment state: " |
| 448 | << segment_state_.top_index |
| 449 | << " -> " |
| 450 | << new_state.top_index; |
| 451 | } |
| 452 | segment_state_ = new_state; |
| 453 | } |
| 454 | |
Elliott Hughes | 6c1a394 | 2011-08-17 15:00:06 -0700 | [diff] [blame] | 455 | } // namespace art |