Remove -Wno-unused-parameter and -Wno-sign-promo from base cflags.
Fix associated errors about unused parameters and implicit sign conversions.
For sign conversion this was largely in the area of enums, so add ostream
operators for the affected enums and fix tools/generate-operator-out.py.
Tidy arena allocation code and arena allocated data types, rather than fixing
new and delete operators.
Remove dead code.
Change-Id: I5b433e722d2f75baacfacae4d32aef4a828bfe1b
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 8e080d1..9fd9a2b 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -265,14 +265,13 @@
}
// Attempt to create 2 mem maps at or after the requested begin.
main_mem_map_1.reset(MapAnonymousPreferredAddress(kMemMapSpaceName[0], request_begin, capacity_,
- PROT_READ | PROT_WRITE, &error_str));
+ &error_str));
CHECK(main_mem_map_1.get() != nullptr) << error_str;
if (support_homogeneous_space_compaction ||
background_collector_type_ == kCollectorTypeSS ||
foreground_collector_type_ == kCollectorTypeSS) {
main_mem_map_2.reset(MapAnonymousPreferredAddress(kMemMapSpaceName[1], main_mem_map_1->End(),
- capacity_, PROT_READ | PROT_WRITE,
- &error_str));
+ capacity_, &error_str));
CHECK(main_mem_map_2.get() != nullptr) << error_str;
}
// Create the non moving space first so that bitmaps don't take up the address range.
@@ -435,8 +434,8 @@
}
}
-MemMap* Heap::MapAnonymousPreferredAddress(const char* name, uint8_t* request_begin, size_t capacity,
- int prot_flags, std::string* out_error_str) {
+MemMap* Heap::MapAnonymousPreferredAddress(const char* name, uint8_t* request_begin,
+ size_t capacity, std::string* out_error_str) {
while (true) {
MemMap* map = MemMap::MapAnonymous(name, request_begin, capacity,
PROT_READ | PROT_WRITE, true, out_error_str);
@@ -887,7 +886,7 @@
if (result != NULL) {
return result;
}
- return FindDiscontinuousSpaceFromObject(obj, true);
+ return FindDiscontinuousSpaceFromObject(obj, fail_ok);
}
space::ImageSpace* Heap::GetImageSpace() const {
@@ -1832,6 +1831,7 @@
virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const {
// Don't sweep any spaces since we probably blasted the internal accounting of the free list
// allocator.
+ UNUSED(space);
return false;
}
@@ -2239,6 +2239,7 @@
void operator()(mirror::Class* klass, mirror::Reference* ref) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ UNUSED(klass);
if (verify_referent_) {
VerifyReference(ref, ref->GetReferent(), mirror::Reference::ReferentOffset());
}
@@ -2583,6 +2584,7 @@
}
void Heap::SwapStacks(Thread* self) {
+ UNUSED(self);
if (kUseThreadLocalAllocationStack) {
live_stack_->AssertAllZero();
}
@@ -2711,6 +2713,7 @@
}
void Heap::PrePauseRosAllocVerification(collector::GarbageCollector* gc) {
+ UNUSED(gc);
// TODO: Add a new runtime option for this?
if (verify_pre_gc_rosalloc_) {
RosAllocVerification(current_gc_iteration_.GetTimings(), "PreGcRosAllocVerification");