-rw-r--r--  compiler/optimizing/instruction_simplifier.h |  6
-rw-r--r--  compiler/optimizing/locations.h              |  6
-rw-r--r--  compiler/optimizing/nodes.h                  |  4
-rw-r--r--  compiler/utils/arm/assembler_arm.h           |  2
-rw-r--r--  compiler/utils/intrusive_forward_list.h      |  2
-rw-r--r--  compiler/utils/swap_space.h                  |  6
-rw-r--r--  runtime/base/allocator.h                     |  3
-rw-r--r--  runtime/base/arena_allocator.h               |  6
-rw-r--r--  runtime/base/arena_containers.h              |  4
-rw-r--r--  runtime/base/scoped_arena_containers.h       |  4
-rw-r--r--  runtime/base/transform_array_ref.h           |  2
-rw-r--r--  runtime/base/transform_iterator.h            |  2
-rw-r--r--  runtime/experimental_flags.h                 |  3
-rw-r--r--  runtime/gc/collector/concurrent_copying.h    |  6
-rw-r--r--  runtime/gc/collector/garbage_collector.h     |  2
-rw-r--r--  runtime/mirror/object_reference.h            |  6
-rw-r--r--  runtime/oat.h                                |  2
-rw-r--r--  runtime/oat_quick_method_header.h            | 10
-rw-r--r--  runtime/thread_list.h                        |  2
19 files changed, 41 insertions, 37 deletions
diff --git a/compiler/optimizing/instruction_simplifier.h b/compiler/optimizing/instruction_simplifier.h
index 7905104ed4..7fe1067aa9 100644
--- a/compiler/optimizing/instruction_simplifier.h
+++ b/compiler/optimizing/instruction_simplifier.h
@@ -35,9 +35,9 @@ namespace art {
*/
class InstructionSimplifier : public HOptimization {
public:
- InstructionSimplifier(HGraph* graph,
- OptimizingCompilerStats* stats = nullptr,
- const char* name = kInstructionSimplifierPassName)
+ explicit InstructionSimplifier(HGraph* graph,
+ OptimizingCompilerStats* stats = nullptr,
+ const char* name = kInstructionSimplifierPassName)
: HOptimization(graph, name, stats) {}
static constexpr const char* kInstructionSimplifierPassName = "instruction_simplifier";
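
The pattern in this hunk repeats throughout the change: a constructor whose trailing parameters all have defaults is callable with a single argument, so without explicit it also acts as an implicit conversion from HGraph* to InstructionSimplifier. A minimal standalone sketch (toy types, not ART code) of what explicit rules out:

    // Toy stand-ins for the graph and the optimization pass; names are illustrative only.
    struct Graph {};

    struct Pass {
      // All trailing parameters are defaulted, so this is effectively a
      // one-argument constructor; explicit stops it from doubling as a
      // Graph* -> Pass conversion.
      explicit Pass(Graph* graph, const char* name = "pass") : graph_(graph), name_(name) {}

      Graph* graph_;
      const char* name_;
    };

    inline void Run(const Pass& pass) { (void)pass; }

    inline void Demo(Graph* graph) {
      // Run(graph);     // no longer compiles once the constructor is explicit
      Run(Pass(graph));  // the conversion now has to be written out
    }
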
diff --git a/compiler/optimizing/locations.h b/compiler/optimizing/locations.h
index 43840422ca..c97c4a6c5b 100644
--- a/compiler/optimizing/locations.h
+++ b/compiler/optimizing/locations.h
@@ -488,9 +488,9 @@ class LocationSummary : public ArenaObject<kArenaAllocLocationSummary> {
kCallOnMainOnly
};
- LocationSummary(HInstruction* instruction,
- CallKind call_kind = kNoCall,
- bool intrinsified = false);
+ explicit LocationSummary(HInstruction* instruction,
+ CallKind call_kind = kNoCall,
+ bool intrinsified = false);
void SetInAt(uint32_t at, Location location) {
inputs_[at] = location;
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 7c3ca5cefa..caecc578c6 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -837,7 +837,7 @@ static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
public:
- HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
+ explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
: graph_(graph),
predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
@@ -6282,7 +6282,7 @@ class HInstanceOf FINAL : public HExpression<2> {
class HBoundType FINAL : public HExpression<1> {
public:
- HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
+ explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
: HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
SetPackedFlag<kFlagUpperCanBeNull>(true);
diff --git a/compiler/utils/arm/assembler_arm.h b/compiler/utils/arm/assembler_arm.h
index 3084e6e2b6..aefbf26db7 100644
--- a/compiler/utils/arm/assembler_arm.h
+++ b/compiler/utils/arm/assembler_arm.h
@@ -246,7 +246,7 @@ class Address : public ValueObject {
NegPostIndex = (0|0|0) << 21 // negative post-indexed with writeback
};
- Address(Register rn, int32_t offset = 0, Mode am = Offset) : rn_(rn), rm_(R0),
+ explicit Address(Register rn, int32_t offset = 0, Mode am = Offset) : rn_(rn), rm_(R0),
offset_(offset),
am_(am), is_immed_offset_(true), shift_(LSL) {
}
diff --git a/compiler/utils/intrusive_forward_list.h b/compiler/utils/intrusive_forward_list.h
index ec2c08722c..b5fc2f2456 100644
--- a/compiler/utils/intrusive_forward_list.h
+++ b/compiler/utils/intrusive_forward_list.h
@@ -59,7 +59,7 @@ class IntrusiveForwardListIterator : public std::iterator<std::forward_iterator_
// Conversion from iterator to const_iterator.
template <typename OtherT,
typename = typename std::enable_if<std::is_same<T, const OtherT>::value>::type>
- IntrusiveForwardListIterator(const IntrusiveForwardListIterator<OtherT, HookTraits>& src)
+ IntrusiveForwardListIterator(const IntrusiveForwardListIterator<OtherT, HookTraits>& src) // NOLINT, implicit
: hook_(src.hook_) { }
// Iteration.
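
Unlike the constructors above, this converting constructor is meant to stay implicit: it mirrors the standard-container convention that an iterator converts silently to the matching const_iterator, and the enable_if restricts it to exactly that direction. Hence the NOLINT comment instead of explicit. A standalone sketch of the same pattern (toy iterator, not the ART class):

    #include <type_traits>

    template <typename T>
    struct ToyIterator {
      ToyIterator() : ptr_(nullptr) {}
      explicit ToyIterator(T* ptr) : ptr_(ptr) {}

      // Conversion from iterator to const_iterator; enabled only when T is the
      // const-qualified version of OtherT, and implicit on purpose.
      template <typename OtherT,
                typename = typename std::enable_if<std::is_same<T, const OtherT>::value>::type>
      ToyIterator(const ToyIterator<OtherT>& src) : ptr_(src.ptr_) {}  // NOLINT, implicit

      T* ptr_;
    };

    inline void TakesConstIterator(ToyIterator<const int> /* it */) {}

    inline void Demo(ToyIterator<int> it) {
      TakesConstIterator(it);  // relies on the implicit converting constructor
    }
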
diff --git a/compiler/utils/swap_space.h b/compiler/utils/swap_space.h
index bf06675d72..9600907278 100644
--- a/compiler/utils/swap_space.h
+++ b/compiler/utils/swap_space.h
@@ -114,7 +114,8 @@ class SwapAllocator<void> {
explicit SwapAllocator(SwapSpace* swap_space) : swap_space_(swap_space) {}
template <typename U>
- SwapAllocator(const SwapAllocator<U>& other) : swap_space_(other.swap_space_) {}
+ SwapAllocator(const SwapAllocator<U>& other) // NOLINT, implicit
+ : swap_space_(other.swap_space_) {}
SwapAllocator(const SwapAllocator& other) = default;
SwapAllocator& operator=(const SwapAllocator& other) = default;
@@ -149,7 +150,8 @@ class SwapAllocator {
explicit SwapAllocator(SwapSpace* swap_space) : swap_space_(swap_space) {}
template <typename U>
- SwapAllocator(const SwapAllocator<U>& other) : swap_space_(other.swap_space_) {}
+ SwapAllocator(const SwapAllocator<U>& other) // NOLINT, implicit
+ : swap_space_(other.swap_space_) {}
SwapAllocator(const SwapAllocator& other) = default;
SwapAllocator& operator=(const SwapAllocator& other) = default;
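
Here too the converting constructor is deliberately left implicit and the linter silenced rather than adding explicit: containers rebind the user-supplied allocator to their internal node types, constructing SwapAllocator<Node> from SwapAllocator<T>, and the conventional allocator interface keeps that conversion implicit. A minimal standalone sketch with a toy allocator (no SwapSpace involved):

    #include <cstddef>
    #include <cstdlib>
    #include <list>

    template <typename T>
    class ToyAllocator {
     public:
      using value_type = T;

      ToyAllocator() = default;

      // Rebind-style conversion, e.g. ToyAllocator<int> -> ToyAllocator<Node>,
      // performed internally by the container; kept implicit by convention.
      template <typename U>
      ToyAllocator(const ToyAllocator<U>& /* other */) {}  // NOLINT, implicit

      T* allocate(std::size_t n) { return static_cast<T*>(std::malloc(n * sizeof(T))); }
      void deallocate(T* p, std::size_t /* n */) { std::free(p); }
    };

    template <typename T, typename U>
    bool operator==(const ToyAllocator<T>&, const ToyAllocator<U>&) { return true; }
    template <typename T, typename U>
    bool operator!=(const ToyAllocator<T>&, const ToyAllocator<U>&) { return false; }

    int main() {
      // std::list never allocates raw ints; it rebinds ToyAllocator<int> to its
      // node type, which exercises the converting constructor above.
      std::list<int, ToyAllocator<int>> values({1, 2, 3}, ToyAllocator<int>());
      return values.size() == 3u ? 0 : 1;
    }
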
diff --git a/runtime/base/allocator.h b/runtime/base/allocator.h
index e48eca9a2d..8d1c982f3d 100644
--- a/runtime/base/allocator.h
+++ b/runtime/base/allocator.h
@@ -116,7 +116,8 @@ class TrackingAllocatorImpl : public std::allocator<T> {
// Used internally by STL data structures.
template <class U>
- TrackingAllocatorImpl(const TrackingAllocatorImpl<U, kTag>& alloc ATTRIBUTE_UNUSED) noexcept {}
+ TrackingAllocatorImpl( // NOLINT, implicit
+ const TrackingAllocatorImpl<U, kTag>& alloc ATTRIBUTE_UNUSED) noexcept {}
// Used internally by STL data structures.
TrackingAllocatorImpl() noexcept {
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 3fad96b39b..31dbb36821 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -258,9 +258,9 @@ class MemMapArena FINAL : public Arena {
class ArenaPool {
public:
- ArenaPool(bool use_malloc = true,
- bool low_4gb = false,
- const char* name = "LinearAlloc");
+ explicit ArenaPool(bool use_malloc = true,
+ bool low_4gb = false,
+ const char* name = "LinearAlloc");
~ArenaPool();
Arena* AllocArena(size_t size) REQUIRES(!lock_);
void FreeArenaChain(Arena* first) REQUIRES(!lock_);
diff --git a/runtime/base/arena_containers.h b/runtime/base/arena_containers.h
index 68cacd57e6..2c8aa2880a 100644
--- a/runtime/base/arena_containers.h
+++ b/runtime/base/arena_containers.h
@@ -132,7 +132,7 @@ class ArenaAllocatorAdapter<void> : private ArenaAllocatorAdapterKind {
arena_allocator_(arena_allocator) {
}
template <typename U>
- ArenaAllocatorAdapter(const ArenaAllocatorAdapter<U>& other)
+ ArenaAllocatorAdapter(const ArenaAllocatorAdapter<U>& other) // NOLINT, implicit
: ArenaAllocatorAdapterKind(other),
arena_allocator_(other.arena_allocator_) {
}
@@ -168,7 +168,7 @@ class ArenaAllocatorAdapter : private ArenaAllocatorAdapterKind {
arena_allocator_(arena_allocator) {
}
template <typename U>
- ArenaAllocatorAdapter(const ArenaAllocatorAdapter<U>& other)
+ ArenaAllocatorAdapter(const ArenaAllocatorAdapter<U>& other) // NOLINT, implicit
: ArenaAllocatorAdapterKind(other),
arena_allocator_(other.arena_allocator_) {
}
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index bd19d00544..7964705993 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -87,7 +87,7 @@ class ScopedArenaAllocatorAdapter<void>
arena_stack_(arena_allocator->arena_stack_) {
}
template <typename U>
- ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
+ ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other) // NOLINT, implicit
: DebugStackReference(other),
DebugStackIndirectTopRef(other),
ArenaAllocatorAdapterKind(other),
@@ -130,7 +130,7 @@ class ScopedArenaAllocatorAdapter
arena_stack_(arena_allocator->arena_stack_) {
}
template <typename U>
- ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
+ ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other) // NOLINT, implicit
: DebugStackReference(other),
DebugStackIndirectTopRef(other),
ArenaAllocatorAdapterKind(other),
diff --git a/runtime/base/transform_array_ref.h b/runtime/base/transform_array_ref.h
index a4e0bc27ed..b432f86d77 100644
--- a/runtime/base/transform_array_ref.h
+++ b/runtime/base/transform_array_ref.h
@@ -72,7 +72,7 @@ class TransformArrayRef {
template <typename OtherBT,
typename = typename std::enable_if<std::is_same<BaseType, const OtherBT>::value>::type>
- TransformArrayRef(const TransformArrayRef<OtherBT, Function>& other)
+ TransformArrayRef(const TransformArrayRef<OtherBT, Function>& other) // NOLINT, implicit
: TransformArrayRef(other.base(), other.GetFunction()) { }
// Assignment operators.
diff --git a/runtime/base/transform_iterator.h b/runtime/base/transform_iterator.h
index 9c8f822b71..f1a8a52ceb 100644
--- a/runtime/base/transform_iterator.h
+++ b/runtime/base/transform_iterator.h
@@ -62,7 +62,7 @@ class TransformIterator {
: data_(base, fn) { }
template <typename OtherBI>
- TransformIterator(const TransformIterator<OtherBI, Function>& other)
+ TransformIterator(const TransformIterator<OtherBI, Function>& other) // NOLINT, implicit
: data_(other.base(), other.GetFunction()) {
}
diff --git a/runtime/experimental_flags.h b/runtime/experimental_flags.h
index 7faa2dc7e3..54d2c35b7c 100644
--- a/runtime/experimental_flags.h
+++ b/runtime/experimental_flags.h
@@ -31,7 +31,8 @@ struct ExperimentalFlags {
};
constexpr ExperimentalFlags() : value_(0x0000) {}
- constexpr ExperimentalFlags(decltype(kNone) t) : value_(static_cast<uint32_t>(t)) {}
+ constexpr ExperimentalFlags(decltype(kNone) t) // NOLINT, implicit
+ : value_(static_cast<uint32_t>(t)) {}
constexpr operator decltype(kNone)() const {
return static_cast<decltype(kNone)>(value_);
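
Another intentionally implicit constructor: keeping the conversion from the enumerator type implicit lets call sites pass a bare flag value (for example kNone) wherever ExperimentalFlags is expected, so the lint warning is suppressed instead of the constructor being made explicit. A standalone sketch of the idiom with toy names (kTrialFeature is purely illustrative):

    #include <cstdint>

    struct Flags {
      enum Value : uint32_t {
        kNone = 0x0000,
        kTrialFeature = 0x0001,  // hypothetical flag, for illustration only
      };

      constexpr Flags() : value_(0x0000) {}
      constexpr Flags(Value v) : value_(static_cast<uint32_t>(v)) {}  // NOLINT, implicit

      constexpr operator Value() const { return static_cast<Value>(value_); }

     private:
      uint32_t value_;
    };

    // A bare enumerator converts implicitly at the call site:
    //   IsEnabled(Flags::kTrialFeature)
    inline bool IsEnabled(Flags flags) {
      return static_cast<uint32_t>(static_cast<Flags::Value>(flags)) != 0u;
    }
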
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 1ef0aeac24..53473f0c02 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -61,9 +61,9 @@ class ConcurrentCopying : public GarbageCollector {
// pages.
static constexpr bool kGrayDirtyImmuneObjects = true;
- ConcurrentCopying(Heap* heap,
- const std::string& name_prefix = "",
- bool measure_read_barrier_slow_path = false);
+ explicit ConcurrentCopying(Heap* heap,
+ const std::string& name_prefix = "",
+ bool measure_read_barrier_slow_path = false);
~ConcurrentCopying();
virtual void RunPhases() OVERRIDE
diff --git a/runtime/gc/collector/garbage_collector.h b/runtime/gc/collector/garbage_collector.h
index 6afe876710..4ffa254bb0 100644
--- a/runtime/gc/collector/garbage_collector.h
+++ b/runtime/gc/collector/garbage_collector.h
@@ -44,7 +44,7 @@ class Heap;
namespace collector {
struct ObjectBytePair {
- ObjectBytePair(uint64_t num_objects = 0, int64_t num_bytes = 0)
+ explicit ObjectBytePair(uint64_t num_objects = 0, int64_t num_bytes = 0)
: objects(num_objects), bytes(num_bytes) {}
void Add(const ObjectBytePair& other) {
objects += other.objects;
diff --git a/runtime/mirror/object_reference.h b/runtime/mirror/object_reference.h
index 583cfc3c75..f4a358018e 100644
--- a/runtime/mirror/object_reference.h
+++ b/runtime/mirror/object_reference.h
@@ -55,7 +55,7 @@ class MANAGED ObjectReference {
}
protected:
- ObjectReference<kPoisonReferences, MirrorType>(MirrorType* mirror_ptr)
+ explicit ObjectReference(MirrorType* mirror_ptr)
REQUIRES_SHARED(Locks::mutator_lock_)
: reference_(Compress(mirror_ptr)) {
}
@@ -87,7 +87,7 @@ class MANAGED HeapReference : public ObjectReference<kPoisonHeapReferences, Mirr
return HeapReference<MirrorType>(mirror_ptr);
}
private:
- HeapReference<MirrorType>(MirrorType* mirror_ptr) REQUIRES_SHARED(Locks::mutator_lock_)
+ explicit HeapReference(MirrorType* mirror_ptr) REQUIRES_SHARED(Locks::mutator_lock_)
: ObjectReference<kPoisonHeapReferences, MirrorType>(mirror_ptr) {}
};
@@ -104,7 +104,7 @@ class MANAGED CompressedReference : public mirror::ObjectReference<false, Mirror
}
private:
- CompressedReference<MirrorType>(MirrorType* p) REQUIRES_SHARED(Locks::mutator_lock_)
+ explicit CompressedReference(MirrorType* p) REQUIRES_SHARED(Locks::mutator_lock_)
: mirror::ObjectReference<false, MirrorType>(p) {}
};
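
Besides adding explicit, these hunks drop the template-argument list from the constructor names (HeapReference<MirrorType>(...) becomes HeapReference(...)): inside a class template the injected-class-name already denotes the current specialization, so repeating the arguments is redundant, and newer compilers warn about the template-id spelling on constructors. A toy illustration, not the ART classes:

    template <typename T>
    class Ref {
     public:
      // Redundant form the diff removes (the injected-class-name suffices):
      //   Ref<T>(T* ptr) : ptr_(ptr) {}
      // Preferred form, now also marked explicit:
      explicit Ref(T* ptr) : ptr_(ptr) {}

      T* Get() const { return ptr_; }

     private:
      T* ptr_;
    };

    inline int Demo() {
      static int value = 42;
      Ref<int> ref(&value);       // direct-initialization: fine with explicit
      // Ref<int> ref2 = &value;  // copy-initialization: rejected once explicit
      return *ref.Get();
    }
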
diff --git a/runtime/oat.h b/runtime/oat.h
index 35d0c92e84..12a8298771 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -171,7 +171,7 @@ std::ostream& operator<<(std::ostream& os, const OatClassType& rhs);
class PACKED(4) OatMethodOffsets {
public:
- OatMethodOffsets(uint32_t code_offset = 0);
+ explicit OatMethodOffsets(uint32_t code_offset = 0);
~OatMethodOffsets();
diff --git a/runtime/oat_quick_method_header.h b/runtime/oat_quick_method_header.h
index abddc6d7a0..ee5002f84a 100644
--- a/runtime/oat_quick_method_header.h
+++ b/runtime/oat_quick_method_header.h
@@ -30,11 +30,11 @@ class ArtMethod;
// OatQuickMethodHeader precedes the raw code chunk generated by the compiler.
class PACKED(4) OatQuickMethodHeader {
public:
- OatQuickMethodHeader(uint32_t vmap_table_offset = 0U,
- uint32_t frame_size_in_bytes = 0U,
- uint32_t core_spill_mask = 0U,
- uint32_t fp_spill_mask = 0U,
- uint32_t code_size = 0U);
+ explicit OatQuickMethodHeader(uint32_t vmap_table_offset = 0U,
+ uint32_t frame_size_in_bytes = 0U,
+ uint32_t core_spill_mask = 0U,
+ uint32_t fp_spill_mask = 0U,
+ uint32_t code_size = 0U);
~OatQuickMethodHeader();
diff --git a/runtime/thread_list.h b/runtime/thread_list.h
index 5880085576..cef4ed168e 100644
--- a/runtime/thread_list.h
+++ b/runtime/thread_list.h
@@ -209,7 +209,7 @@ class ThreadList {
// Helper for suspending all threads and
class ScopedSuspendAll : public ValueObject {
public:
- ScopedSuspendAll(const char* cause, bool long_suspend = false)
+ explicit ScopedSuspendAll(const char* cause, bool long_suspend = false)
EXCLUSIVE_LOCK_FUNCTION(Locks::mutator_lock_)
REQUIRES(!Locks::thread_list_lock_,
!Locks::thread_suspend_count_lock_,