/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_ARRAY_INL_H_
#define ART_RUNTIME_MIRROR_ARRAY_INL_H_

#include "array.h"

#include "base/stringprintf.h"
#include "class.h"
#include "gc/heap-inl.h"
#include "thread.h"
#include "utils.h"

namespace art {
namespace mirror {

inline uint32_t Array::ClassSize() {
  uint32_t vtable_entries = Object::kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline size_t Array::SizeOf() {
  // This is safe from overflow because the array was already allocated, so we know it's sane.
  size_t component_size_shift = GetClass<kVerifyFlags, kReadBarrierOption>()->
      template GetComponentSizeShift<kReadBarrierOption>();
  // Don't need to check this since we already check this in GetClass.
  int32_t component_count =
      GetLength<static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis)>();
  size_t header_size = DataOffset(1U << component_size_shift).SizeValue();
  size_t data_size = component_count << component_size_shift;
  return header_size + data_size;
}

inline MemberOffset Array::DataOffset(size_t component_size) {
  DCHECK(IsPowerOfTwo(component_size)) << component_size;
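  // Round the offset of the first element up to the component size so that wide elements
  // (e.g. 8-byte longs and doubles) remain naturally aligned after the array header.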
  size_t data_offset = RoundUp(OFFSETOF_MEMBER(Array, first_element_), component_size);
  DCHECK_EQ(RoundUp(data_offset, component_size), data_offset)
      << "Array data offset isn't aligned with component size";
  return MemberOffset(data_offset);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Array::CheckIsValidIndex(int32_t index) {
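  // The unsigned casts fold the negative-index and out-of-range checks into a single
  // comparison: a negative index becomes a value larger than any valid length.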
  if (UNLIKELY(static_cast<uint32_t>(index) >=
               static_cast<uint32_t>(GetLength<kVerifyFlags>()))) {
    ThrowArrayIndexOutOfBoundsException(index);
    return false;
  }
  return true;
}

static inline size_t ComputeArraySize(int32_t component_count, size_t component_size_shift) {
  DCHECK_GE(component_count, 0);

  size_t component_size = 1U << component_size_shift;
  size_t header_size = Array::DataOffset(component_size).SizeValue();
  size_t data_size = static_cast<size_t>(component_count) << component_size_shift;
  size_t size = header_size + data_size;

  // Check for size_t overflow if this was an unreasonable request
  // but let the caller throw OutOfMemoryError.
#ifdef __LP64__
  // 64-bit. No overflow as component_count is 32-bit and the maximum
  // component size is 8.
  DCHECK_LE((1U << component_size_shift), 8U);
#else
  // 32-bit.
  DCHECK_NE(header_size, 0U);
  DCHECK_EQ(RoundUp(header_size, component_size), header_size);
  // The array length limit (exclusive).
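  // (0U - header_size) wraps to 2^32 - header_size in 32-bit size_t arithmetic, so the shift
  // gives an exclusive upper bound on component counts whose total size fits in size_t.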
  const size_t length_limit = (0U - header_size) >> component_size_shift;
  if (UNLIKELY(length_limit <= static_cast<size_t>(component_count))) {
    return 0;  // failure
  }
#endif
  return size;
}

// Used for setting the array length in the allocation code path to ensure it is guarded by a
// StoreStore fence.
class SetLengthVisitor {
 public:
  explicit SetLengthVisitor(int32_t length) : length_(length) {
  }

  void operator()(Object* obj, size_t usable_size) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    UNUSED(usable_size);
    // Avoid AsArray as object is not yet in live bitmap or allocation stack.
    Array* array = down_cast<Array*>(obj);
    // DCHECK(array->IsArrayInstance());
    array->SetLength(length_);
  }

 private:
  const int32_t length_;

  DISALLOW_COPY_AND_ASSIGN(SetLengthVisitor);
};

// Similar to SetLengthVisitor, used for setting the array length to fill the usable size of an
// array.
class SetLengthToUsableSizeVisitor {
 public:
  SetLengthToUsableSizeVisitor(int32_t min_length, size_t header_size,
                               size_t component_size_shift) :
      minimum_length_(min_length), header_size_(header_size),
      component_size_shift_(component_size_shift) {
  }

  void operator()(Object* obj, size_t usable_size) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Avoid AsArray as object is not yet in live bitmap or allocation stack.
    Array* array = down_cast<Array*>(obj);
    // DCHECK(array->IsArrayInstance());
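    // The allocator may have returned more usable space than requested; turn that slack into
    // extra array elements by computing the largest length that still fits.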
    int32_t length = (usable_size - header_size_) >> component_size_shift_;
    DCHECK_GE(length, minimum_length_);
    uint8_t* old_end = reinterpret_cast<uint8_t*>(array->GetRawData(1U << component_size_shift_,
                                                                    minimum_length_));
    uint8_t* new_end = reinterpret_cast<uint8_t*>(array->GetRawData(1U << component_size_shift_,
                                                                    length));
    // Ensure space beyond original allocation is zeroed.
    memset(old_end, 0, new_end - old_end);
    array->SetLength(length);
  }

 private:
  const int32_t minimum_length_;
  const size_t header_size_;
  const size_t component_size_shift_;

  DISALLOW_COPY_AND_ASSIGN(SetLengthToUsableSizeVisitor);
};

template <bool kIsInstrumented, bool kFillUsable>
inline Array* Array::Alloc(Thread* self, Class* array_class, int32_t component_count,
                           size_t component_size_shift, gc::AllocatorType allocator_type) {
  DCHECK(allocator_type != gc::kAllocatorTypeLOS);
  DCHECK(array_class != nullptr);
  DCHECK(array_class->IsArrayClass());
  DCHECK_EQ(array_class->GetComponentSizeShift(), component_size_shift);
  DCHECK_EQ(array_class->GetComponentSize(), (1U << component_size_shift));
  size_t size = ComputeArraySize(component_count, component_size_shift);
#ifdef __LP64__
  // 64-bit. No size_t overflow.
  DCHECK_NE(size, 0U);
#else
  // 32-bit.
  if (UNLIKELY(size == 0)) {
    self->ThrowOutOfMemoryError(StringPrintf("%s of length %d would overflow",
                                             PrettyDescriptor(array_class).c_str(),
                                             component_count).c_str());
    return nullptr;
  }
#endif
  gc::Heap* heap = Runtime::Current()->GetHeap();
  Array* result;
  if (!kFillUsable) {
    SetLengthVisitor visitor(component_count);
    result = down_cast<Array*>(
        heap->AllocObjectWithAllocator<kIsInstrumented, true>(self, array_class, size,
                                                              allocator_type, visitor));
  } else {
    SetLengthToUsableSizeVisitor visitor(component_count,
                                         DataOffset(1U << component_size_shift).SizeValue(),
                                         component_size_shift);
    result = down_cast<Array*>(
        heap->AllocObjectWithAllocator<kIsInstrumented, true>(self, array_class, size,
                                                              allocator_type, visitor));
  }
  if (kIsDebugBuild && result != nullptr && Runtime::Current()->IsStarted()) {
    array_class = result->GetClass();  // In case the array class moved.
    CHECK_EQ(array_class->GetComponentSize(), 1U << component_size_shift);
    if (!kFillUsable) {
      CHECK_EQ(result->SizeOf(), size);
    } else {
      CHECK_GE(result->SizeOf(), size);
    }
  }
  return result;
}

template<class T>
inline void PrimitiveArray<T>::VisitRoots(RootVisitor* visitor) {
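  // Visit the cached array class for this primitive type (a sticky class root), if it has been
  // set.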
  array_class_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
}

template<typename T>
inline PrimitiveArray<T>* PrimitiveArray<T>::Alloc(Thread* self, size_t length) {
  Array* raw_array = Array::Alloc<true>(self, GetArrayClass(), length,
                                        ComponentSizeShiftWidth<sizeof(T)>(),
                                        Runtime::Current()->GetHeap()->GetCurrentAllocator());
  return down_cast<PrimitiveArray<T>*>(raw_array);
}

template<typename T>
inline T PrimitiveArray<T>::Get(int32_t i) {
  if (!CheckIsValidIndex(i)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return T(0);
  }
  return GetWithoutChecks(i);
}

template<typename T>
inline void PrimitiveArray<T>::Set(int32_t i, T value) {
  if (Runtime::Current()->IsActiveTransaction()) {
    Set<true>(i, value);
  } else {
    Set<false>(i, value);
  }
}

template<typename T>
template<bool kTransactionActive, bool kCheckTransaction>
inline void PrimitiveArray<T>::Set(int32_t i, T value) {
  if (CheckIsValidIndex(i)) {
    SetWithoutChecks<kTransactionActive, kCheckTransaction>(i, value);
  } else {
    DCHECK(Thread::Current()->IsExceptionPending());
  }
}

template<typename T>
template<bool kTransactionActive, bool kCheckTransaction>
inline void PrimitiveArray<T>::SetWithoutChecks(int32_t i, T value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
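  // With an active transaction, record the previous value so the store can be undone if the
  // transaction is rolled back.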
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteArray(this, i, GetWithoutChecks(i));
  }
  DCHECK(CheckIsValidIndex(i));
  GetData()[i] = value;
}

// Backward copy where elements are aligned appropriately for T. Count is in T sized units.
// Copies are guaranteed not to tear when T is smaller than 64 bits.
template<typename T>
static inline void ArrayBackwardCopy(T* d, const T* s, int32_t count) {
  d += count;
  s += count;
  for (int32_t i = 0; i < count; ++i) {
    d--;
    s--;
    *d = *s;
  }
}

// Forward copy where elements are aligned appropriately for T. Count is in T sized units.
// Copies are guaranteed not to tear when T is smaller than 64 bits.
template<typename T>
static inline void ArrayForwardCopy(T* d, const T* s, int32_t count) {
  for (int32_t i = 0; i < count; ++i) {
    *d = *s;
    d++;
    s++;
  }
}

template<class T>
inline void PrimitiveArray<T>::Memmove(int32_t dst_pos, PrimitiveArray<T>* src, int32_t src_pos,
                                       int32_t count) {
  if (UNLIKELY(count == 0)) {
    return;
  }
  DCHECK_GE(dst_pos, 0);
  DCHECK_GE(src_pos, 0);
  DCHECK_GT(count, 0);
  DCHECK(src != nullptr);
  DCHECK_LT(dst_pos, GetLength());
  DCHECK_LE(dst_pos, GetLength() - count);
  DCHECK_LT(src_pos, src->GetLength());
  DCHECK_LE(src_pos, src->GetLength() - count);

  // Note for non-byte copies we can't rely on standard libc functions like memcpy(3) and
  // memmove(3) in our implementation, because they may copy byte-by-byte.
  if (LIKELY(src != this)) {
    // Memcpy ok for guaranteed non-overlapping distinct arrays.
    Memcpy(dst_pos, src, src_pos, count);
  } else {
    // Handle copies within the same array using the appropriate direction copy.
    void* dst_raw = GetRawData(sizeof(T), dst_pos);
    const void* src_raw = src->GetRawData(sizeof(T), src_pos);
    if (sizeof(T) == sizeof(uint8_t)) {
      uint8_t* d = reinterpret_cast<uint8_t*>(dst_raw);
      const uint8_t* s = reinterpret_cast<const uint8_t*>(src_raw);
      memmove(d, s, count);
    } else {
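      // Copying forward is safe when the destination starts before the source or the regions do
      // not overlap; otherwise copy backward so source elements are read before being
      // overwritten.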
      const bool copy_forward = (dst_pos < src_pos) || (dst_pos - src_pos >= count);
      if (sizeof(T) == sizeof(uint16_t)) {
        uint16_t* d = reinterpret_cast<uint16_t*>(dst_raw);
        const uint16_t* s = reinterpret_cast<const uint16_t*>(src_raw);
        if (copy_forward) {
          ArrayForwardCopy<uint16_t>(d, s, count);
        } else {
          ArrayBackwardCopy<uint16_t>(d, s, count);
        }
      } else if (sizeof(T) == sizeof(uint32_t)) {
        uint32_t* d = reinterpret_cast<uint32_t*>(dst_raw);
        const uint32_t* s = reinterpret_cast<const uint32_t*>(src_raw);
        if (copy_forward) {
          ArrayForwardCopy<uint32_t>(d, s, count);
        } else {
          ArrayBackwardCopy<uint32_t>(d, s, count);
        }
      } else {
        DCHECK_EQ(sizeof(T), sizeof(uint64_t));
        uint64_t* d = reinterpret_cast<uint64_t*>(dst_raw);
        const uint64_t* s = reinterpret_cast<const uint64_t*>(src_raw);
        if (copy_forward) {
          ArrayForwardCopy<uint64_t>(d, s, count);
        } else {
          ArrayBackwardCopy<uint64_t>(d, s, count);
        }
      }
    }
  }
}

template<class T>
inline void PrimitiveArray<T>::Memcpy(int32_t dst_pos, PrimitiveArray<T>* src, int32_t src_pos,
                                      int32_t count) {
  if (UNLIKELY(count == 0)) {
    return;
  }
  DCHECK_GE(dst_pos, 0);
  DCHECK_GE(src_pos, 0);
  DCHECK_GT(count, 0);
  DCHECK(src != nullptr);
  DCHECK_LT(dst_pos, GetLength());
  DCHECK_LE(dst_pos, GetLength() - count);
  DCHECK_LT(src_pos, src->GetLength());
  DCHECK_LE(src_pos, src->GetLength() - count);

  // Note for non-byte copies we can't rely on standard libc functions like memcpy(3) and
  // memmove(3) in our implementation, because they may copy byte-by-byte.
  void* dst_raw = GetRawData(sizeof(T), dst_pos);
  const void* src_raw = src->GetRawData(sizeof(T), src_pos);
  if (sizeof(T) == sizeof(uint8_t)) {
    memcpy(dst_raw, src_raw, count);
  } else if (sizeof(T) == sizeof(uint16_t)) {
    uint16_t* d = reinterpret_cast<uint16_t*>(dst_raw);
    const uint16_t* s = reinterpret_cast<const uint16_t*>(src_raw);
    ArrayForwardCopy<uint16_t>(d, s, count);
  } else if (sizeof(T) == sizeof(uint32_t)) {
    uint32_t* d = reinterpret_cast<uint32_t*>(dst_raw);
    const uint32_t* s = reinterpret_cast<const uint32_t*>(src_raw);
    ArrayForwardCopy<uint32_t>(d, s, count);
  } else {
    DCHECK_EQ(sizeof(T), sizeof(uint64_t));
    uint64_t* d = reinterpret_cast<uint64_t*>(dst_raw);
    const uint64_t* s = reinterpret_cast<const uint64_t*>(src_raw);
    ArrayForwardCopy<uint64_t>(d, s, count);
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_ARRAY_INL_H_