Use C++17's [[maybe_unused]] attribute in ART
Bug: 169680875
Test: mmm art
Change-Id: Ic0cc320891c42b07a2b5520a584d2b62052e7235
diff --git a/artd/artd_main.cc b/artd/artd_main.cc
index 3644eba..9fe5bd8 100644
--- a/artd/artd_main.cc
+++ b/artd/artd_main.cc
@@ -22,7 +22,7 @@
#include "android/binder_process.h"
#include "artd.h"
-int main(int argc ATTRIBUTE_UNUSED, char* argv[]) {
+int main([[maybe_unused]] int argc, char* argv[]) {
android::base::InitLogging(argv);
auto artd = ndk::SharedRefBase::make<art::artd::Artd>();
diff --git a/benchmark/golem-tiagent/golem-tiagent.cc b/benchmark/golem-tiagent/golem-tiagent.cc
index be2c727..9fe0644 100644
--- a/benchmark/golem-tiagent/golem-tiagent.cc
+++ b/benchmark/golem-tiagent/golem-tiagent.cc
@@ -35,9 +35,9 @@
}
}
-static void JNICALL VMInitCallback(jvmtiEnv *jenv ATTRIBUTE_UNUSED,
+static void JNICALL VMInitCallback([[maybe_unused]] jvmtiEnv* jenv,
JNIEnv* jni_env,
- jthread thread ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jthread thread) {
// Set a breakpoint on a rare method that we won't expect to be hit.
// java.lang.Thread.stop is deprecated and not expected to be used.
jclass cl = jni_env->FindClass("java/lang/Thread");
@@ -57,8 +57,8 @@
}
extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
// Setup jvmti_env
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0) != 0) {
LOG(ERROR) << "Unable to get jvmti env!";
diff --git a/cmdline/cmdline.h b/cmdline/cmdline.h
index b8ca7d0..2108a7a 100644
--- a/cmdline/cmdline.h
+++ b/cmdline/cmdline.h
@@ -321,15 +321,13 @@
}
protected:
- virtual ParseStatus ParseCustom(const char* raw_option ATTRIBUTE_UNUSED,
- size_t raw_option_length ATTRIBUTE_UNUSED,
- std::string* error_msg ATTRIBUTE_UNUSED) {
+ virtual ParseStatus ParseCustom([[maybe_unused]] const char* raw_option,
+ [[maybe_unused]] size_t raw_option_length,
+ [[maybe_unused]] std::string* error_msg) {
return kParseUnknownArgument;
}
- virtual ParseStatus ParseChecks(std::string* error_msg ATTRIBUTE_UNUSED) {
- return kParseOk;
- }
+ virtual ParseStatus ParseChecks([[maybe_unused]] std::string* error_msg) { return kParseOk; }
};
template <typename Args = CmdlineArgs>
diff --git a/cmdline/cmdline_parser_test.cc b/cmdline/cmdline_parser_test.cc
index effbee9..4d53857 100644
--- a/cmdline/cmdline_parser_test.cc
+++ b/cmdline/cmdline_parser_test.cc
@@ -75,12 +75,13 @@
// that are nevertheless equal.
// If a test is failing because the structs aren't "equal" when they really are
// then it's recommended to implement operator== for it instead.
- template <typename T, typename ... Ignore>
- bool UsuallyEquals(const T& expected, const T& actual,
- const Ignore& ... more ATTRIBUTE_UNUSED,
- typename std::enable_if<std::is_pod<T>::value>::type* = nullptr,
- typename std::enable_if<!detail::SupportsEqualityOperator<T>::value>::type* = nullptr
- ) {
+ template <typename T, typename... Ignore>
+ bool UsuallyEquals(
+ const T& expected,
+ const T& actual,
+ [[maybe_unused]] const Ignore&... more,
+ typename std::enable_if<std::is_pod<T>::value>::type* = nullptr,
+ typename std::enable_if<!detail::SupportsEqualityOperator<T>::value>::type* = nullptr) {
return memcmp(std::addressof(expected), std::addressof(actual), sizeof(T)) == 0;
}
diff --git a/cmdline/cmdline_type_parser.h b/cmdline/cmdline_type_parser.h
index 82a76f4..10e28f3 100644
--- a/cmdline/cmdline_type_parser.h
+++ b/cmdline/cmdline_type_parser.h
@@ -34,7 +34,7 @@
//
// e.g. if the argument definition was "foo:_", and the user-provided input was "foo:bar",
// then args is "bar".
- Result Parse(const std::string& args ATTRIBUTE_UNUSED) {
+ Result Parse([[maybe_unused]] const std::string& args) {
assert(false);
return Result::Failure("Missing type specialization and/or value map");
}
@@ -46,8 +46,8 @@
//
// If the initial value does not exist yet, a default value is created by
// value-initializing with 'T()'.
- Result ParseAndAppend(const std::string& args ATTRIBUTE_UNUSED,
- T& existing_value ATTRIBUTE_UNUSED) {
+ Result ParseAndAppend([[maybe_unused]] const std::string& args,
+ [[maybe_unused]] T& existing_value) {
assert(false);
return Result::Failure("Missing type specialization and/or value map");
}
diff --git a/cmdline/token_range.h b/cmdline/token_range.h
index e917e1d..f662ca6 100644
--- a/cmdline/token_range.h
+++ b/cmdline/token_range.h
@@ -55,7 +55,7 @@
#if 0
// Copying-from-vector constructor.
- TokenRange(const TokenList& token_list ATTRIBUTE_UNUSED,
+ TokenRange([[maybe_unused]] const TokenList& token_list,
TokenList::const_iterator it_begin,
TokenList::const_iterator it_end)
: token_list_(new TokenList(it_begin, it_end)),
diff --git a/compiler/common_compiler_test.cc b/compiler/common_compiler_test.cc
index 442b96e..a37f516 100644
--- a/compiler/common_compiler_test.cc
+++ b/compiler/common_compiler_test.cc
@@ -133,9 +133,9 @@
CompiledMethod* CreateCompiledMethod(InstructionSet instruction_set,
ArrayRef<const uint8_t> code,
ArrayRef<const uint8_t> stack_map,
- ArrayRef<const uint8_t> cfi ATTRIBUTE_UNUSED,
+ [[maybe_unused]] ArrayRef<const uint8_t> cfi,
ArrayRef<const linker::LinkerPatch> patches,
- bool is_intrinsic ATTRIBUTE_UNUSED) override {
+ [[maybe_unused]] bool is_intrinsic) override {
// Supports only one method at a time.
CHECK_EQ(instruction_set_, InstructionSet::kNone);
CHECK_NE(instruction_set, InstructionSet::kNone);
@@ -150,15 +150,15 @@
return reinterpret_cast<CompiledMethod*>(this);
}
- ArrayRef<const uint8_t> GetThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED,
- /*out*/ std::string* debug_name ATTRIBUTE_UNUSED) override {
+ ArrayRef<const uint8_t> GetThunkCode([[maybe_unused]] const linker::LinkerPatch& patch,
+ [[maybe_unused]] /*out*/ std::string* debug_name) override {
LOG(FATAL) << "Unsupported.";
UNREACHABLE();
}
- void SetThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED,
- ArrayRef<const uint8_t> code ATTRIBUTE_UNUSED,
- const std::string& debug_name ATTRIBUTE_UNUSED) override {
+ void SetThunkCode([[maybe_unused]] const linker::LinkerPatch& patch,
+ [[maybe_unused]] ArrayRef<const uint8_t> code,
+ [[maybe_unused]] const std::string& debug_name) override {
LOG(FATAL) << "Unsupported.";
UNREACHABLE();
}
diff --git a/compiler/compiler.h b/compiler/compiler.h
index ce785bb..6c317f7 100644
--- a/compiler/compiler.h
+++ b/compiler/compiler.h
@@ -73,12 +73,12 @@
const DexFile& dex_file,
Handle<mirror::DexCache> dex_cache) const = 0;
- virtual bool JitCompile(Thread* self ATTRIBUTE_UNUSED,
- jit::JitCodeCache* code_cache ATTRIBUTE_UNUSED,
- jit::JitMemoryRegion* region ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- CompilationKind compilation_kind ATTRIBUTE_UNUSED,
- jit::JitLogger* jit_logger ATTRIBUTE_UNUSED)
+ virtual bool JitCompile([[maybe_unused]] Thread* self,
+ [[maybe_unused]] jit::JitCodeCache* code_cache,
+ [[maybe_unused]] jit::JitMemoryRegion* region,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] CompilationKind compilation_kind,
+ [[maybe_unused]] jit::JitLogger* jit_logger)
REQUIRES_SHARED(Locks::mutator_lock_) {
return false;
}
diff --git a/compiler/debug/elf_debug_writer.cc b/compiler/debug/elf_debug_writer.cc
index 8f64d73..505b6c5 100644
--- a/compiler/debug/elf_debug_writer.cc
+++ b/compiler/debug/elf_debug_writer.cc
@@ -113,7 +113,7 @@
template <typename ElfTypes>
static std::vector<uint8_t> MakeMiniDebugInfoInternal(
InstructionSet isa,
- const InstructionSetFeatures* features ATTRIBUTE_UNUSED,
+ [[maybe_unused]] const InstructionSetFeatures* features,
typename ElfTypes::Addr text_section_address,
size_t text_section_size,
typename ElfTypes::Addr dex_section_address,
@@ -172,11 +172,10 @@
}
}
-std::vector<uint8_t> MakeElfFileForJIT(
- InstructionSet isa,
- const InstructionSetFeatures* features ATTRIBUTE_UNUSED,
- bool mini_debug_info,
- const MethodDebugInfo& method_info) {
+std::vector<uint8_t> MakeElfFileForJIT(InstructionSet isa,
+ [[maybe_unused]] const InstructionSetFeatures* features,
+ bool mini_debug_info,
+ const MethodDebugInfo& method_info) {
using ElfTypes = ElfRuntimeTypes;
CHECK_EQ(sizeof(ElfTypes::Addr), static_cast<size_t>(GetInstructionSetPointerSize(isa)));
CHECK_EQ(method_info.is_code_address_text_relative, false);
@@ -213,13 +212,12 @@
DCHECK_EQ(sym.st_size, method_info.code_size);
num_syms++;
});
- reader.VisitDebugFrame([&](const Reader::CIE* cie ATTRIBUTE_UNUSED) {
- num_cies++;
- }, [&](const Reader::FDE* fde, const Reader::CIE* cie ATTRIBUTE_UNUSED) {
- DCHECK_EQ(fde->sym_addr, method_info.code_address);
- DCHECK_EQ(fde->sym_size, method_info.code_size);
- num_fdes++;
- });
+ reader.VisitDebugFrame([&]([[maybe_unused]] const Reader::CIE* cie) { num_cies++; },
+ [&](const Reader::FDE* fde, [[maybe_unused]] const Reader::CIE* cie) {
+ DCHECK_EQ(fde->sym_addr, method_info.code_address);
+ DCHECK_EQ(fde->sym_size, method_info.code_size);
+ num_fdes++;
+ });
DCHECK_EQ(num_syms, 1u);
DCHECK_LE(num_cies, 1u);
DCHECK_LE(num_fdes, 1u);
@@ -302,18 +300,20 @@
// ART always produces the same CIE, so we copy the first one and ignore the rest.
bool copied_cie = false;
for (Reader& reader : readers) {
- reader.VisitDebugFrame([&](const Reader::CIE* cie) {
- if (!copied_cie) {
- debug_frame->WriteFully(cie->data(), cie->size());
- copied_cie = true;
- }
- }, [&](const Reader::FDE* fde, const Reader::CIE* cie ATTRIBUTE_UNUSED) {
- DCHECK(copied_cie);
- DCHECK_EQ(fde->cie_pointer, 0);
- if (!is_removed_symbol(fde->sym_addr)) {
- debug_frame->WriteFully(fde->data(), fde->size());
- }
- });
+ reader.VisitDebugFrame(
+ [&](const Reader::CIE* cie) {
+ if (!copied_cie) {
+ debug_frame->WriteFully(cie->data(), cie->size());
+ copied_cie = true;
+ }
+ },
+ [&](const Reader::FDE* fde, [[maybe_unused]] const Reader::CIE* cie) {
+ DCHECK(copied_cie);
+ DCHECK_EQ(fde->cie_pointer, 0);
+ if (!is_removed_symbol(fde->sym_addr)) {
+ debug_frame->WriteFully(fde->data(), fde->size());
+ }
+ });
}
debug_frame->End();
@@ -348,9 +348,8 @@
std::vector<uint8_t> WriteDebugElfFileForClasses(
InstructionSet isa,
- const InstructionSetFeatures* features ATTRIBUTE_UNUSED,
- const ArrayRef<mirror::Class*>& types)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] const InstructionSetFeatures* features,
+ const ArrayRef<mirror::Class*>& types) REQUIRES_SHARED(Locks::mutator_lock_) {
using ElfTypes = ElfRuntimeTypes;
CHECK_EQ(sizeof(ElfTypes::Addr), static_cast<size_t>(GetInstructionSetPointerSize(isa)));
std::vector<uint8_t> buffer;
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index 2fd7a6b..b99e78f 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -175,9 +175,8 @@
// SFINAE for non-ref-types. Always 0.
template <typename T>
-size_t count_nonnull_refs_single_helper(T arg ATTRIBUTE_UNUSED,
- typename std::enable_if<!jni_type_traits<T>::is_ref>::type*
- = nullptr) {
+size_t count_nonnull_refs_single_helper(
+ [[maybe_unused]] T arg, typename std::enable_if<!jni_type_traits<T>::is_ref>::type* = nullptr) {
return 0;
}
@@ -597,10 +596,9 @@
class CountReferencesVisitor : public RootVisitor {
public:
- void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
+ void VisitRoots([[maybe_unused]] mirror::Object*** roots,
size_t count,
- const RootInfo& info) override
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) {
if (info.GetType() == art::RootType::kRootJavaFrame) {
const JavaFrameRootInfo& jrfi = static_cast<const JavaFrameRootInfo&>(info);
if (jrfi.GetVReg() == JavaFrameRootInfo::kNativeReferenceArgument) {
@@ -610,10 +608,9 @@
}
}
- void VisitRoots(mirror::CompressedReference<mirror::Object>** roots ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED,
- const RootInfo& info) override
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ void VisitRoots([[maybe_unused]] mirror::CompressedReference<mirror::Object>** roots,
+ [[maybe_unused]] size_t count,
+ const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) {
CHECK_NE(info.GetType(), art::RootType::kRootJavaFrame);
}
@@ -986,8 +983,8 @@
JNI_TEST(CompileAndRunIntObjectObjectMethod)
int gJava_MyClassNatives_fooSII_calls[kJniKindCount] = {};
-jint Java_MyClassNatives_fooSII(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED,
+jint Java_MyClassNatives_fooSII([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass,
jint x,
jint y) {
gJava_MyClassNatives_fooSII_calls[gCurrentJni]++;
@@ -1009,8 +1006,8 @@
JNI_TEST_CRITICAL(CompileAndRunStaticIntIntMethod)
int gJava_MyClassNatives_fooSDD_calls[kJniKindCount] = {};
-jdouble Java_MyClassNatives_fooSDD(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED,
+jdouble Java_MyClassNatives_fooSDD([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass,
jdouble x,
jdouble y) {
gJava_MyClassNatives_fooSDD_calls[gCurrentJni]++;
@@ -1682,8 +1679,8 @@
JNI_TEST(CompileAndRunFloatFloatMethod)
-void Java_MyClassNatives_checkParameterAlign(JNIEnv* env ATTRIBUTE_UNUSED,
- jobject thisObj ATTRIBUTE_UNUSED,
+void Java_MyClassNatives_checkParameterAlign([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jobject thisObj,
jint i1,
jlong l1) {
EXPECT_EQ(i1, 1234);
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
index 9d0761d..0f981dd 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
@@ -117,7 +117,7 @@
return scratch_regs;
}
-static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni ATTRIBUTE_UNUSED) {
+static ManagedRegister ReturnRegisterForShorty(const char* shorty, [[maybe_unused]] bool jni) {
if (shorty[0] == 'F' || shorty[0] == 'D') {
return X86_64ManagedRegister::FromXmmRegister(XMM0);
} else if (shorty[0] == 'J') {
diff --git a/compiler/linker/output_stream_test.cc b/compiler/linker/output_stream_test.cc
index 22b174f..6b62874 100644
--- a/compiler/linker/output_stream_test.cc
+++ b/compiler/linker/output_stream_test.cc
@@ -107,13 +107,13 @@
flush_called(false) { }
~CheckingOutputStream() override {}
- bool WriteFully(const void* buffer ATTRIBUTE_UNUSED,
- size_t byte_count ATTRIBUTE_UNUSED) override {
+ bool WriteFully([[maybe_unused]] const void* buffer,
+ [[maybe_unused]] size_t byte_count) override {
LOG(FATAL) << "UNREACHABLE";
UNREACHABLE();
}
- off_t Seek(off_t offset ATTRIBUTE_UNUSED, Whence whence ATTRIBUTE_UNUSED) override {
+ off_t Seek([[maybe_unused]] off_t offset, [[maybe_unused]] Whence whence) override {
LOG(FATAL) << "UNREACHABLE";
UNREACHABLE();
}
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index f90f17f..cc8cb89 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -288,8 +288,8 @@
return code_generation_data_->GetJitClassRootIndex(type_reference);
}
-void CodeGenerator::EmitJitRootPatches(uint8_t* code ATTRIBUTE_UNUSED,
- const uint8_t* roots_data ATTRIBUTE_UNUSED) {
+void CodeGenerator::EmitJitRootPatches([[maybe_unused]] uint8_t* code,
+ [[maybe_unused]] const uint8_t* roots_data) {
DCHECK(code_generation_data_ != nullptr);
DCHECK_EQ(code_generation_data_->GetNumberOfJitStringRoots(), 0u);
DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u);
@@ -457,18 +457,18 @@
}
void CodeGenerator::EmitLinkerPatches(
- ArenaVector<linker::LinkerPatch>* linker_patches ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] ArenaVector<linker::LinkerPatch>* linker_patches) {
// No linker patches by default.
}
-bool CodeGenerator::NeedsThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED) const {
+bool CodeGenerator::NeedsThunkCode([[maybe_unused]] const linker::LinkerPatch& patch) const {
// Code generators that create patches requiring thunk compilation should override this function.
return false;
}
-void CodeGenerator::EmitThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED,
- /*out*/ ArenaVector<uint8_t>* code ATTRIBUTE_UNUSED,
- /*out*/ std::string* debug_name ATTRIBUTE_UNUSED) {
+void CodeGenerator::EmitThunkCode([[maybe_unused]] const linker::LinkerPatch& patch,
+ [[maybe_unused]] /*out*/ ArenaVector<uint8_t>* code,
+ [[maybe_unused]] /*out*/ std::string* debug_name) {
// Code generators that create patches requiring thunk compilation should override this function.
LOG(FATAL) << "Unexpected call to EmitThunkCode().";
}
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index d530d08..fe81b31 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1572,15 +1572,15 @@
return kArm64WordSize;
}
-size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
- uint32_t reg_id ATTRIBUTE_UNUSED) {
+size_t CodeGeneratorARM64::SaveFloatingPointRegister([[maybe_unused]] size_t stack_index,
+ [[maybe_unused]] uint32_t reg_id) {
LOG(FATAL) << "FP registers shouldn't be saved/restored individually, "
<< "use SaveRestoreLiveRegistersHelper";
UNREACHABLE();
}
-size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
- uint32_t reg_id ATTRIBUTE_UNUSED) {
+size_t CodeGeneratorARM64::RestoreFloatingPointRegister([[maybe_unused]] size_t stack_index,
+ [[maybe_unused]] uint32_t reg_id) {
LOG(FATAL) << "FP registers shouldn't be saved/restored individually, "
<< "use SaveRestoreLiveRegistersHelper";
UNREACHABLE();
@@ -3687,7 +3687,7 @@
}
void InstructionCodeGeneratorARM64::VisitDoubleConstant(
- HDoubleConstant* constant ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HDoubleConstant* constant) {
// Will be generated at use site.
}
@@ -3695,8 +3695,7 @@
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
-}
+void InstructionCodeGeneratorARM64::VisitExit([[maybe_unused]] HExit* exit) {}
void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
LocationSummary* locations =
@@ -3704,7 +3703,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) {
// Will be generated at use site.
}
@@ -4565,7 +4564,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitIntConstant([[maybe_unused]] HIntConstant* constant) {
// Will be generated at use site.
}
@@ -4574,7 +4573,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitNullConstant([[maybe_unused]] HNullConstant* constant) {
// Will be generated at use site.
}
@@ -4749,8 +4748,8 @@
}
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
- const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- ArtMethod* method ATTRIBUTE_UNUSED) {
+ const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
+ [[maybe_unused]] ArtMethod* method) {
// On ARM64 we support all dispatch types.
return desired_dispatch_info;
}
@@ -5639,7 +5638,7 @@
new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
-void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitClearException([[maybe_unused]] HClearException* clear) {
__ Str(wzr, GetExceptionTlsAddress());
}
@@ -5769,7 +5768,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitLongConstant([[maybe_unused]] HLongConstant* constant) {
// Will be generated at use site.
}
@@ -5969,7 +5968,7 @@
HandleBinaryOp(instruction);
}
-void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderARM64::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) {
LOG(FATAL) << "Unreachable";
}
@@ -5996,7 +5995,7 @@
}
void InstructionCodeGeneratorARM64::VisitParameterValue(
- HParameterValue* instruction ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HParameterValue* instruction) {
// Nothing to do, the parameter is already at its location.
}
@@ -6007,7 +6006,7 @@
}
void InstructionCodeGeneratorARM64::VisitCurrentMethod(
- HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HCurrentMethod* instruction) {
// Nothing to do, the method is already at its location.
}
@@ -6019,7 +6018,7 @@
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitPhi([[maybe_unused]] HPhi* instruction) {
LOG(FATAL) << "Unreachable";
}
@@ -6214,7 +6213,7 @@
}
void InstructionCodeGeneratorARM64::VisitConstructorFence(
- HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HConstructorFence* constructor_fence) {
codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
@@ -6254,7 +6253,7 @@
instruction->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitReturnVoid([[maybe_unused]] HReturnVoid* instruction) {
codegen_->GenerateFrameExit();
}
@@ -6506,12 +6505,12 @@
HandleBinaryOp(instruction);
}
-void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderARM64::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARM64::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 6190364..b256307 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -304,16 +304,16 @@
Location GetFieldIndexLocation() const override {
return helpers::LocationFrom(vixl::aarch64::x0);
}
- Location GetReturnLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
+ Location GetReturnLocation([[maybe_unused]] DataType::Type type) const override {
return helpers::LocationFrom(vixl::aarch64::x0);
}
- Location GetSetValueLocation(DataType::Type type ATTRIBUTE_UNUSED,
+ Location GetSetValueLocation([[maybe_unused]] DataType::Type type,
bool is_instance) const override {
return is_instance
? helpers::LocationFrom(vixl::aarch64::x2)
: helpers::LocationFrom(vixl::aarch64::x1);
}
- Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
+ Location GetFpuLocation([[maybe_unused]] DataType::Type type) const override {
return helpers::LocationFrom(vixl::aarch64::d0);
}
@@ -737,9 +737,7 @@
ParallelMoveResolverARM64* GetMoveResolver() override { return &move_resolver_; }
- bool NeedsTwoRegisters(DataType::Type type ATTRIBUTE_UNUSED) const override {
- return false;
- }
+ bool NeedsTwoRegisters([[maybe_unused]] DataType::Type type) const override { return false; }
// Check if the desired_string_load_kind is supported. If it is, return it,
// otherwise return a fall-back kind that should be used instead.
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index ecc7a68..b61f6b5 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -1103,27 +1103,27 @@
}
// Saves the register in the stack. Returns the size taken on stack.
-size_t CodeGeneratorARMVIXL::SaveCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
- uint32_t reg_id ATTRIBUTE_UNUSED) {
+size_t CodeGeneratorARMVIXL::SaveCoreRegister([[maybe_unused]] size_t stack_index,
+ [[maybe_unused]] uint32_t reg_id) {
TODO_VIXL32(FATAL);
UNREACHABLE();
}
// Restores the register from the stack. Returns the size taken on stack.
-size_t CodeGeneratorARMVIXL::RestoreCoreRegister(size_t stack_index ATTRIBUTE_UNUSED,
- uint32_t reg_id ATTRIBUTE_UNUSED) {
+size_t CodeGeneratorARMVIXL::RestoreCoreRegister([[maybe_unused]] size_t stack_index,
+ [[maybe_unused]] uint32_t reg_id) {
TODO_VIXL32(FATAL);
UNREACHABLE();
}
-size_t CodeGeneratorARMVIXL::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
- uint32_t reg_id ATTRIBUTE_UNUSED) {
+size_t CodeGeneratorARMVIXL::SaveFloatingPointRegister([[maybe_unused]] size_t stack_index,
+ [[maybe_unused]] uint32_t reg_id) {
TODO_VIXL32(FATAL);
UNREACHABLE();
}
-size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED,
- uint32_t reg_id ATTRIBUTE_UNUSED) {
+size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister([[maybe_unused]] size_t stack_index,
+ [[maybe_unused]] uint32_t reg_id) {
TODO_VIXL32(FATAL);
UNREACHABLE();
}
@@ -2873,8 +2873,7 @@
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
-}
+void InstructionCodeGeneratorARMVIXL::VisitExit([[maybe_unused]] HExit* exit) {}
void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition,
vixl32::Label* true_target,
@@ -3471,7 +3470,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitIntConstant([[maybe_unused]] HIntConstant* constant) {
// Will be generated at use site.
}
@@ -3481,7 +3480,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitNullConstant([[maybe_unused]] HNullConstant* constant) {
// Will be generated at use site.
}
@@ -3491,7 +3490,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitLongConstant([[maybe_unused]] HLongConstant* constant) {
// Will be generated at use site.
}
@@ -3502,7 +3501,7 @@
}
void InstructionCodeGeneratorARMVIXL::VisitFloatConstant(
- HFloatConstant* constant ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HFloatConstant* constant) {
// Will be generated at use site.
}
@@ -3513,7 +3512,7 @@
}
void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant(
- HDoubleConstant* constant ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HDoubleConstant* constant) {
// Will be generated at use site.
}
@@ -3522,7 +3521,7 @@
}
void InstructionCodeGeneratorARMVIXL::VisitConstructorFence(
- HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HConstructorFence* constructor_fence) {
codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
@@ -3538,7 +3537,7 @@
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) {
codegen_->GenerateFrameExit();
}
@@ -5666,7 +5665,7 @@
}
void InstructionCodeGeneratorARMVIXL::VisitParameterValue(
- HParameterValue* instruction ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HParameterValue* instruction) {
// Nothing to do, the parameter is already at its location.
}
@@ -5677,7 +5676,7 @@
}
void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod(
- HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HCurrentMethod* instruction) {
// Nothing to do, the method is already at its location.
}
@@ -5818,7 +5817,7 @@
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitPhi([[maybe_unused]] HPhi* instruction) {
LOG(FATAL) << "Unreachable";
}
@@ -7282,7 +7281,7 @@
}
}
-void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderARMVIXL::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) {
LOG(FATAL) << "Unreachable";
}
@@ -7991,7 +7990,7 @@
new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
-void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitClearException([[maybe_unused]] HClearException* clear) {
UseScratchRegisterScope temps(GetVIXLAssembler());
vixl32::Register temp = temps.Acquire();
__ Mov(temp, 0);
@@ -9914,12 +9913,12 @@
}
}
-void LocationsBuilderARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderARMVIXL::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorARMVIXL::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index bcf5ea0..3ae6515 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1919,8 +1919,7 @@
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
-}
+void InstructionCodeGeneratorX86::VisitExit([[maybe_unused]] HExit* exit) {}
template<class LabelType>
void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
@@ -2560,7 +2559,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitIntConstant([[maybe_unused]] HIntConstant* constant) {
// Will be generated at use site.
}
@@ -2570,7 +2569,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitNullConstant([[maybe_unused]] HNullConstant* constant) {
// Will be generated at use site.
}
@@ -2580,7 +2579,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitLongConstant([[maybe_unused]] HLongConstant* constant) {
// Will be generated at use site.
}
@@ -2590,7 +2589,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) {
// Will be generated at use site.
}
@@ -2600,7 +2599,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitDoubleConstant([[maybe_unused]] HDoubleConstant* constant) {
// Will be generated at use site.
}
@@ -2609,7 +2608,7 @@
}
void InstructionCodeGeneratorX86::VisitConstructorFence(
- HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HConstructorFence* constructor_fence) {
codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
@@ -2625,7 +2624,7 @@
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) {
codegen_->GenerateFrameExit();
}
@@ -5140,8 +5139,7 @@
}
void InstructionCodeGeneratorX86::VisitParameterValue(
- HParameterValue* instruction ATTRIBUTE_UNUSED) {
-}
+ [[maybe_unused]] HParameterValue* instruction) {}
void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
LocationSummary* locations =
@@ -5149,7 +5147,7 @@
locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
-void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitCurrentMethod([[maybe_unused]] HCurrentMethod* instruction) {
}
void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
@@ -5348,7 +5346,7 @@
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitPhi([[maybe_unused]] HPhi* instruction) {
LOG(FATAL) << "Unreachable";
}
@@ -5377,8 +5375,8 @@
}
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
- const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- ArtMethod* method ATTRIBUTE_UNUSED) {
+ const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
+ [[maybe_unused]] ArtMethod* method) {
return desired_dispatch_info;
}
@@ -6803,7 +6801,7 @@
}
}
-void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderX86::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) {
LOG(FATAL) << "Unreachable";
}
@@ -7551,7 +7549,7 @@
new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
-void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitClearException([[maybe_unused]] HClearException* clear) {
__ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
@@ -8637,12 +8635,12 @@
__ Bind(slow_path->GetExitLabel());
}
-void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderX86::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
@@ -9097,13 +9095,13 @@
}
}
-void LocationsBuilderX86::VisitIntermediateAddress(HIntermediateAddress* instruction
- ATTRIBUTE_UNUSED) {
+void LocationsBuilderX86::VisitIntermediateAddress(
+ [[maybe_unused]] HIntermediateAddress* instruction) {
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86::VisitIntermediateAddress(HIntermediateAddress* instruction
- ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86::VisitIntermediateAddress(
+ [[maybe_unused]] HIntermediateAddress* instruction) {
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index d27155f..0905f32 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -196,7 +196,7 @@
? Location::RegisterLocation(EDX)
: Location::RegisterLocation(ECX));
}
- Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
+ Location GetFpuLocation([[maybe_unused]] DataType::Type type) const override {
return Location::FpuRegisterLocation(XMM0);
}
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 2e03f1f..7c61519 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1072,8 +1072,8 @@
}
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
- const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- ArtMethod* method ATTRIBUTE_UNUSED) {
+ const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
+ [[maybe_unused]] ArtMethod* method) {
return desired_dispatch_info;
}
@@ -2002,8 +2002,9 @@
Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
-void CodeGeneratorX86_64::MoveLocation(
- Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
+void CodeGeneratorX86_64::MoveLocation(Location dst,
+ Location src,
+ [[maybe_unused]] DataType::Type dst_type) {
Move(dst, src);
}
@@ -2062,8 +2063,7 @@
exit->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
-}
+void InstructionCodeGeneratorX86_64::VisitExit([[maybe_unused]] HExit* exit) {}
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
@@ -2710,7 +2710,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitIntConstant([[maybe_unused]] HIntConstant* constant) {
// Will be generated at use site.
}
@@ -2720,7 +2720,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitNullConstant([[maybe_unused]] HNullConstant* constant) {
// Will be generated at use site.
}
@@ -2730,7 +2730,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitLongConstant([[maybe_unused]] HLongConstant* constant) {
// Will be generated at use site.
}
@@ -2740,7 +2740,7 @@
locations->SetOut(Location::ConstantLocation(constant));
}
-void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) {
// Will be generated at use site.
}
@@ -2751,7 +2751,7 @@
}
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
- HDoubleConstant* constant ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HDoubleConstant* constant) {
// Will be generated at use site.
}
@@ -2760,7 +2760,7 @@
}
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
- HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HConstructorFence* constructor_fence) {
codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
@@ -2776,7 +2776,7 @@
ret->SetLocations(nullptr);
}
-void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) {
codegen_->GenerateFrameExit();
}
@@ -5025,7 +5025,7 @@
}
void InstructionCodeGeneratorX86_64::VisitParameterValue(
- HParameterValue* instruction ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HParameterValue* instruction) {
// Nothing to do, the parameter is already at its location.
}
@@ -5036,7 +5036,7 @@
}
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
- HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HCurrentMethod* instruction) {
// Nothing to do, the method is already at its location.
}
@@ -5115,7 +5115,7 @@
locations->SetOut(Location::Any());
}
-void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitPhi([[maybe_unused]] HPhi* instruction) {
LOG(FATAL) << "Unimplemented";
}
@@ -6136,7 +6136,7 @@
}
}
-void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderX86_64::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) {
LOG(FATAL) << "Unimplemented";
}
@@ -6811,7 +6811,7 @@
new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
-void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitClearException([[maybe_unused]] HClearException* clear) {
__ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
@@ -7881,12 +7881,12 @@
__ Bind(slow_path->GetExitLabel());
}
-void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void LocationsBuilderX86_64::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitBoundType([[maybe_unused]] HBoundType* instruction) {
// Nothing to do, this should be removed during prepare for register allocator.
LOG(FATAL) << "Unreachable";
}
@@ -7981,13 +7981,13 @@
__ jmp(temp_reg);
}
-void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
- ATTRIBUTE_UNUSED) {
+void LocationsBuilderX86_64::VisitIntermediateAddress(
+ [[maybe_unused]] HIntermediateAddress* instruction) {
LOG(FATAL) << "Unreachable";
}
-void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
- ATTRIBUTE_UNUSED) {
+void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(
+ [[maybe_unused]] HIntermediateAddress* instruction) {
LOG(FATAL) << "Unreachable";
}
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index dff2e79..e1ce3a9 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -162,16 +162,16 @@
Location GetFieldIndexLocation() const override {
return Location::RegisterLocation(RDI);
}
- Location GetReturnLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
+ Location GetReturnLocation([[maybe_unused]] DataType::Type type) const override {
return Location::RegisterLocation(RAX);
}
- Location GetSetValueLocation(DataType::Type type ATTRIBUTE_UNUSED, bool is_instance)
- const override {
+ Location GetSetValueLocation([[maybe_unused]] DataType::Type type,
+ bool is_instance) const override {
return is_instance
? Location::RegisterLocation(RDX)
: Location::RegisterLocation(RSI);
}
- Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override {
+ Location GetFpuLocation([[maybe_unused]] DataType::Type type) const override {
return Location::FpuRegisterLocation(XMM0);
}
@@ -502,9 +502,7 @@
block_labels_ = CommonInitializeLabels<Label>();
}
- bool NeedsTwoRegisters(DataType::Type type ATTRIBUTE_UNUSED) const override {
- return false;
- }
+ bool NeedsTwoRegisters([[maybe_unused]] DataType::Type type) const override { return false; }
// Check if the desired_string_load_kind is supported. If it is, return it,
// otherwise return a fall-back kind that should be used instead.
diff --git a/compiler/optimizing/codegen_test_utils.h b/compiler/optimizing/codegen_test_utils.h
index 7af9d0f..53163da 100644
--- a/compiler/optimizing/codegen_test_utils.h
+++ b/compiler/optimizing/codegen_test_utils.h
@@ -103,8 +103,8 @@
blocked_core_registers_[arm::R7] = false;
}
- void MaybeGenerateMarkingRegisterCheck(int code ATTRIBUTE_UNUSED,
- Location temp_loc ATTRIBUTE_UNUSED) override {
+ void MaybeGenerateMarkingRegisterCheck([[maybe_unused]] int code,
+ [[maybe_unused]] Location temp_loc) override {
// When turned on, the marking register checks in
// CodeGeneratorARMVIXL::MaybeGenerateMarkingRegisterCheck expects the
// Thread Register and the Marking Register to be set to
@@ -135,8 +135,8 @@
TestCodeGeneratorARM64(HGraph* graph, const CompilerOptions& compiler_options)
: arm64::CodeGeneratorARM64(graph, compiler_options) {}
- void MaybeGenerateMarkingRegisterCheck(int codem ATTRIBUTE_UNUSED,
- Location temp_loc ATTRIBUTE_UNUSED) override {
+ void MaybeGenerateMarkingRegisterCheck([[maybe_unused]] int codem,
+ [[maybe_unused]] Location temp_loc) override {
// When turned on, the marking register checks in
// CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck expect the
// Thread Register and the Marking Register to be set to
diff --git a/compiler/optimizing/constructor_fence_redundancy_elimination.cc b/compiler/optimizing/constructor_fence_redundancy_elimination.cc
index d9b7652..48635cf 100644
--- a/compiler/optimizing/constructor_fence_redundancy_elimination.cc
+++ b/compiler/optimizing/constructor_fence_redundancy_elimination.cc
@@ -78,7 +78,7 @@
VisitSetLocation(instruction, value);
}
- void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) override {
+ void VisitDeoptimize([[maybe_unused]] HDeoptimize* instruction) override {
// Pessimize: Merge all fences.
MergeCandidateFences();
}
@@ -151,7 +151,7 @@
}
}
- void VisitSetLocation(HInstruction* inst ATTRIBUTE_UNUSED, HInstruction* store_input) {
+ void VisitSetLocation([[maybe_unused]] HInstruction* inst, HInstruction* store_input) {
// An object is considered "published" if it's stored onto the heap.
// Sidenote: A later "LSE" pass can still remove the fence if it proves the
// object doesn't actually escape.
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index 893cd04..8cb7f6a 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -60,8 +60,7 @@
// Define visitor methods.
#define OPTIMIZING_INTRINSICS(Name, ...) \
- virtual void Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
- }
+ virtual void Visit##Name([[maybe_unused]] HInvoke* invoke) {}
#include "intrinsics_list.h"
INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
@@ -254,11 +253,9 @@
// intrinsic to exploit e.g. no side-effects or exceptions, but otherwise not handled
// by this architecture-specific intrinsics code generator. Eventually it is implemented
// as a true method call.
-#define UNIMPLEMENTED_INTRINSIC(Arch, Name) \
-void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
-} \
-void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
-}
+#define UNIMPLEMENTED_INTRINSIC(Arch, Name) \
+ void IntrinsicLocationsBuilder##Arch::Visit##Name([[maybe_unused]] HInvoke* invoke) {} \
+ void IntrinsicCodeGenerator##Arch::Visit##Name([[maybe_unused]] HInvoke* invoke) {}
// Defines a list of unreached intrinsics: that is, method calls that are recognized as
// an intrinsic, and then always converted into HIR instructions before they reach any
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index fb9a419..2ec2134 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -3676,7 +3676,7 @@
locations->SetInAt(0, Location::Any());
}
-void IntrinsicCodeGeneratorARM64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+void IntrinsicCodeGeneratorARM64::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {}
void IntrinsicLocationsBuilderARM64::VisitCRC32Update(HInvoke* invoke) {
if (!codegen_->GetInstructionSetFeatures().HasCRC()) {
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index 366a468..d31593c 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2653,7 +2653,7 @@
locations->SetInAt(0, Location::Any());
}
-void IntrinsicCodeGeneratorARMVIXL::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+void IntrinsicCodeGeneratorARMVIXL::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {}
void IntrinsicLocationsBuilderARMVIXL::VisitIntegerDivideUnsigned(HInvoke* invoke) {
CreateIntIntToIntSlowPathCallLocations(allocator_, invoke);
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index a93a8b5..02f312e 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3503,7 +3503,7 @@
locations->SetInAt(0, Location::Any());
}
-void IntrinsicCodeGeneratorX86::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+void IntrinsicCodeGeneratorX86::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {}
void IntrinsicLocationsBuilderX86::VisitIntegerDivideUnsigned(HInvoke* invoke) {
LocationSummary* locations = new (allocator_) LocationSummary(invoke,
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index a573db8..99da844 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1871,7 +1871,7 @@
static void GenUnsafeGet(HInvoke* invoke,
DataType::Type type,
- bool is_volatile ATTRIBUTE_UNUSED,
+ [[maybe_unused]] bool is_volatile,
CodeGeneratorX86_64* codegen) {
X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler());
LocationSummary* locations = invoke->GetLocations();
@@ -3249,7 +3249,7 @@
locations->SetInAt(0, Location::Any());
}
-void IntrinsicCodeGeneratorX86_64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+void IntrinsicCodeGeneratorX86_64::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {}
static void CreateDivideUnsignedLocations(HInvoke* invoke, ArenaAllocator* allocator) {
LocationSummary* locations =
diff --git a/compiler/optimizing/loop_analysis.h b/compiler/optimizing/loop_analysis.h
index cec00fe..cd8f005 100644
--- a/compiler/optimizing/loop_analysis.h
+++ b/compiler/optimizing/loop_analysis.h
@@ -148,13 +148,15 @@
//
// Returns 'true' by default, should be overridden by particular target loop helper.
virtual bool IsLoopNonBeneficialForScalarOpts(
- LoopAnalysisInfo* loop_analysis_info ATTRIBUTE_UNUSED) const { return true; }
+ [[maybe_unused]] LoopAnalysisInfo* loop_analysis_info) const {
+ return true;
+ }
// Returns optimal scalar unrolling factor for the loop.
//
// Returns kNoUnrollingFactor by default, should be overridden by particular target loop helper.
virtual uint32_t GetScalarUnrollingFactor(
- const LoopAnalysisInfo* analysis_info ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] const LoopAnalysisInfo* analysis_info) const {
return LoopAnalysisInfo::kNoUnrollingFactor;
}
@@ -166,17 +168,17 @@
// Returns whether it is beneficial to fully unroll the loop.
//
// Returns 'false' by default, should be overridden by particular target loop helper.
- virtual bool IsFullUnrollingBeneficial(LoopAnalysisInfo* analysis_info ATTRIBUTE_UNUSED) const {
+ virtual bool IsFullUnrollingBeneficial([[maybe_unused]] LoopAnalysisInfo* analysis_info) const {
return false;
}
// Returns optimal SIMD unrolling factor for the loop.
//
// Returns kNoUnrollingFactor by default, should be overridden by particular target loop helper.
- virtual uint32_t GetSIMDUnrollingFactor(HBasicBlock* block ATTRIBUTE_UNUSED,
- int64_t trip_count ATTRIBUTE_UNUSED,
- uint32_t max_peel ATTRIBUTE_UNUSED,
- uint32_t vector_length ATTRIBUTE_UNUSED) const {
+ virtual uint32_t GetSIMDUnrollingFactor([[maybe_unused]] HBasicBlock* block,
+ [[maybe_unused]] int64_t trip_count,
+ [[maybe_unused]] uint32_t max_peel,
+ [[maybe_unused]] uint32_t vector_length) const {
return LoopAnalysisInfo::kNoUnrollingFactor;
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 4a0be10..01fbc99 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -2355,9 +2355,7 @@
return true;
}
- virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
- return false;
- }
+ virtual bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const { return false; }
// If this instruction will do an implicit null check, return the `HNullCheck` associated
// with it. Otherwise return null.
@@ -2570,7 +2568,7 @@
//
// Note: HEnvironment and some other fields are not copied and are set to default values, see
// 'explicit HInstruction(const HInstruction& other)' for details.
- virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const {
+ virtual HInstruction* Clone([[maybe_unused]] ArenaAllocator* arena) const {
LOG(FATAL) << "Cloning is not implemented for the instruction " <<
DebugName() << " " << GetId();
UNREACHABLE();
@@ -2598,7 +2596,7 @@
// Returns whether any data encoded in the two instructions is equal.
// This method does not look at the inputs. Both instructions must be
// of the same type, otherwise the method has undefined behavior.
- virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const {
+ virtual bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const {
return false;
}
@@ -3272,7 +3270,7 @@
class HNullConstant final : public HConstant {
public:
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -3830,7 +3828,7 @@
DataType::Type GetResultType() const { return GetType(); }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -3906,7 +3904,7 @@
}
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -3916,15 +3914,15 @@
HConstant* TryStaticEvaluation() const;
// Apply this operation to `x` and `y`.
- virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
- HNullConstant* y ATTRIBUTE_UNUSED) const {
+ virtual HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
+ [[maybe_unused]] HNullConstant* y) const {
LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
UNREACHABLE();
}
virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
- virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
- HIntConstant* y ATTRIBUTE_UNUSED) const {
+ virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x,
+ [[maybe_unused]] HIntConstant* y) const {
LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
UNREACHABLE();
}
@@ -4052,8 +4050,8 @@
bool IsCommutative() const override { return true; }
- HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
- HNullConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
+ [[maybe_unused]] HNullConstant* y) const override {
return MakeConstantCondition(true, GetDexPc());
}
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
@@ -4099,8 +4097,8 @@
bool IsCommutative() const override { return true; }
- HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
- HNullConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
+ [[maybe_unused]] HNullConstant* y) const override {
return MakeConstantCondition(false, GetDexPc());
}
HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
@@ -4306,13 +4304,13 @@
HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -4348,13 +4346,13 @@
HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -4390,13 +4388,13 @@
HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -4432,13 +4430,13 @@
HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -4525,7 +4523,7 @@
return GetBias() == ComparisonBias::kGtBias;
}
- static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) {
+ static SideEffects SideEffectsForArchRuntimeCalls([[maybe_unused]] DataType::Type type) {
// Comparisons do not require a runtime call in any back end.
return SideEffects::None();
}
@@ -5018,7 +5016,7 @@
return input_records;
}
- bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
+ bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
// We do not access the method via object reference, so we cannot do an implicit null check.
// TODO: for intrinsics we can generate implicit null checks.
return false;
@@ -5602,10 +5600,14 @@
ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
}
// TODO: Evaluation for floating-point values.
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
+ return nullptr;
+ }
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
+ return nullptr;
+ }
DECLARE_INSTRUCTION(Min);
@@ -5637,10 +5639,14 @@
ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
}
// TODO: Evaluation for floating-point values.
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; }
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
+ return nullptr;
+ }
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
+ return nullptr;
+ }
DECLARE_INSTRUCTION(Max);
@@ -5702,7 +5708,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -5739,18 +5745,18 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
}
- HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
- HLongConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HLongConstant* value,
+ [[maybe_unused]] HLongConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
UNREACHABLE();
}
- HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
- HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* value,
+ [[maybe_unused]] HFloatConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
- HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value,
+ [[maybe_unused]] HDoubleConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -5785,18 +5791,18 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
}
- HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
- HLongConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HLongConstant* value,
+ [[maybe_unused]] HLongConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
UNREACHABLE();
}
- HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
- HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* value,
+ [[maybe_unused]] HFloatConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
- HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value,
+ [[maybe_unused]] HDoubleConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -5833,18 +5839,18 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
}
- HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
- HLongConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HLongConstant* value,
+ [[maybe_unused]] HLongConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
UNREACHABLE();
}
- HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
- HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* value,
+ [[maybe_unused]] HFloatConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
- HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value,
+ [[maybe_unused]] HDoubleConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -5876,13 +5882,13 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -5914,13 +5920,13 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -5952,13 +5958,13 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -5996,18 +6002,18 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
}
- HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
- HLongConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HLongConstant* value,
+ [[maybe_unused]] HLongConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
UNREACHABLE();
}
- HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
- HFloatConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* value,
+ [[maybe_unused]] HFloatConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
- HDoubleConstant* distance ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value,
+ [[maybe_unused]] HDoubleConstant* distance) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -6070,7 +6076,7 @@
}
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -6082,11 +6088,11 @@
HConstant* Evaluate(HLongConstant* x) const override {
return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -6104,7 +6110,7 @@
}
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -6116,15 +6122,15 @@
HConstant* Evaluate(HIntConstant* x) const override {
return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HLongConstant* x) const override {
LOG(FATAL) << DebugName() << " is not defined for long values";
UNREACHABLE();
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -6151,7 +6157,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
// Return whether the conversion is implicit. This includes conversion to the same type.
@@ -6183,7 +6189,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -6536,10 +6542,10 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
- bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
+ bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
// TODO: We can be smarter here.
// Currently, unless the array is the result of NewArray, the array access is always
// preceded by some form of null NullCheck necessary for the bounds check, usually
@@ -6643,7 +6649,7 @@
// Can throw ArrayStoreException.
bool CanThrow() const override { return NeedsTypeCheck(); }
- bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override {
+ bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
// TODO: Same as for ArrayGet.
return false;
}
@@ -6749,7 +6755,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
@@ -6793,7 +6799,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -7426,7 +7432,7 @@
}
// TODO: Make ClinitCheck clonable.
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -8346,7 +8352,7 @@
HInstruction* GetCondition() const { return InputAt(2); }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
@@ -8516,7 +8522,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
bool IsActualObject() const override { return false; }
@@ -8553,7 +8559,7 @@
graph_(graph) {}
virtual ~HGraphVisitor() {}
- virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
+ virtual void VisitInstruction([[maybe_unused]] HInstruction* instruction) {}
virtual void VisitBasicBlock(HBasicBlock* block);
// Visit the graph following basic block insertion order.
diff --git a/compiler/optimizing/nodes_shared.h b/compiler/optimizing/nodes_shared.h
index 27e6103..4b0187d 100644
--- a/compiler/optimizing/nodes_shared.h
+++ b/compiler/optimizing/nodes_shared.h
@@ -105,13 +105,13 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -160,7 +160,7 @@
bool IsClonable() const override { return true; }
bool CanBeMoved() const override { return true; }
- bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override {
+ bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
return true;
}
bool IsActualObject() const override { return false; }
diff --git a/compiler/optimizing/nodes_x86.h b/compiler/optimizing/nodes_x86.h
index e246390..14d9823 100644
--- a/compiler/optimizing/nodes_x86.h
+++ b/compiler/optimizing/nodes_x86.h
@@ -149,13 +149,13 @@
return GetBlock()->GetGraph()->GetLongConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
- HFloatConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
+ [[maybe_unused]] HFloatConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
- HDoubleConstant* y ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
+ [[maybe_unused]] HDoubleConstant* y) const override {
LOG(FATAL) << DebugName() << " is not defined for double values";
UNREACHABLE();
}
@@ -196,11 +196,11 @@
HConstant* Evaluate(HLongConstant* x) const override {
return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
}
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const override {
LOG(FATAL) << DebugName() << "is not defined for float values";
UNREACHABLE();
}
- HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override {
+ HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const override {
LOG(FATAL) << DebugName() << "is not defined for double values";
UNREACHABLE();
}
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 00eb6e5..8207501 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -447,8 +447,8 @@
<< HGraphVisualizer::InsertMetaDataAsCompilationBlock(isa_string + ' ' + features_string);
}
-bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx ATTRIBUTE_UNUSED,
- const DexFile& dex_file ATTRIBUTE_UNUSED) const {
+bool OptimizingCompiler::CanCompileMethod([[maybe_unused]] uint32_t method_idx,
+ [[maybe_unused]] const DexFile& dex_file) const {
return true;
}
@@ -1221,7 +1221,7 @@
return new OptimizingCompiler(compiler_options, storage);
}
-bool EncodeArtMethodInInlineInfo(ArtMethod* method ATTRIBUTE_UNUSED) {
+bool EncodeArtMethodInInlineInfo([[maybe_unused]] ArtMethod* method) {
// Note: the runtime is null only for unit testing.
return Runtime::Current() == nullptr || !Runtime::Current()->IsAotCompiler();
}
diff --git a/compiler/optimizing/parallel_move_test.cc b/compiler/optimizing/parallel_move_test.cc
index a1c05e9..d2b9932 100644
--- a/compiler/optimizing/parallel_move_test.cc
+++ b/compiler/optimizing/parallel_move_test.cc
@@ -81,8 +81,8 @@
message_ << ")";
}
- void SpillScratch(int reg ATTRIBUTE_UNUSED) override {}
- void RestoreScratch(int reg ATTRIBUTE_UNUSED) override {}
+ void SpillScratch([[maybe_unused]] int reg) override {}
+ void RestoreScratch([[maybe_unused]] int reg) override {}
std::string GetMessage() const {
return message_.str();
@@ -126,7 +126,7 @@
return scratch;
}
- void FreeScratchLocation(Location loc ATTRIBUTE_UNUSED) override {}
+ void FreeScratchLocation([[maybe_unused]] Location loc) override {}
void EmitMove(size_t index) override {
MoveOperands* move = moves_[index];
diff --git a/compiler/optimizing/scheduler_arm.cc b/compiler/optimizing/scheduler_arm.cc
index 3f931c4..53ad2b1 100644
--- a/compiler/optimizing/scheduler_arm.cc
+++ b/compiler/optimizing/scheduler_arm.cc
@@ -610,7 +610,7 @@
}
}
-void SchedulingLatencyVisitorARM::VisitIntermediateAddress(HIntermediateAddress* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitIntermediateAddress([[maybe_unused]] HIntermediateAddress*) {
// Although the code generated is a simple `add` instruction, we found through empirical results
// that spacing it from its use in memory accesses was beneficial.
last_visited_internal_latency_ = kArmNopLatency;
@@ -618,11 +618,11 @@
}
void SchedulingLatencyVisitorARM::VisitIntermediateAddressIndex(
- HIntermediateAddressIndex* ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HIntermediateAddressIndex*) {
UNIMPLEMENTED(FATAL) << "IntermediateAddressIndex is not implemented for ARM";
}
-void SchedulingLatencyVisitorARM::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitMultiplyAccumulate([[maybe_unused]] HMultiplyAccumulate*) {
last_visited_latency_ = kArmMulIntegerLatency;
}
@@ -806,7 +806,7 @@
}
}
-void SchedulingLatencyVisitorARM::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitBoundsCheck([[maybe_unused]] HBoundsCheck*) {
last_visited_internal_latency_ = kArmIntegerOpLatency;
// Users do not use any data results.
last_visited_latency_ = 0;
@@ -866,22 +866,22 @@
HandleFieldSetLatencies(instruction, instruction->GetFieldInfo());
}
-void SchedulingLatencyVisitorARM::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitInstanceOf([[maybe_unused]] HInstanceOf*) {
last_visited_internal_latency_ = kArmCallInternalLatency;
last_visited_latency_ = kArmIntegerOpLatency;
}
-void SchedulingLatencyVisitorARM::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitInvoke([[maybe_unused]] HInvoke*) {
last_visited_internal_latency_ = kArmCallInternalLatency;
last_visited_latency_ = kArmCallLatency;
}
-void SchedulingLatencyVisitorARM::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitLoadString([[maybe_unused]] HLoadString*) {
last_visited_internal_latency_ = kArmLoadStringInternalLatency;
last_visited_latency_ = kArmMemoryLoadLatency;
}
-void SchedulingLatencyVisitorARM::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM::VisitNewArray([[maybe_unused]] HNewArray*) {
last_visited_internal_latency_ = kArmIntegerOpLatency + kArmCallInternalLatency;
last_visited_latency_ = kArmCallLatency;
}
diff --git a/compiler/optimizing/scheduler_arm.h b/compiler/optimizing/scheduler_arm.h
index 0da21c1..cedc12a 100644
--- a/compiler/optimizing/scheduler_arm.h
+++ b/compiler/optimizing/scheduler_arm.h
@@ -53,7 +53,7 @@
: codegen_(down_cast<CodeGeneratorARMVIXL*>(codegen)) {}
// Default visitor for instructions not handled specifically below.
- void VisitInstruction(HInstruction* ATTRIBUTE_UNUSED) override {
+ void VisitInstruction([[maybe_unused]] HInstruction*) override {
last_visited_latency_ = kArmIntegerOpLatency;
}
diff --git a/compiler/optimizing/scheduler_arm64.cc b/compiler/optimizing/scheduler_arm64.cc
index 3071afd..5113cf4 100644
--- a/compiler/optimizing/scheduler_arm64.cc
+++ b/compiler/optimizing/scheduler_arm64.cc
@@ -30,30 +30,30 @@
}
void SchedulingLatencyVisitorARM64::VisitBitwiseNegatedRight(
- HBitwiseNegatedRight* ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HBitwiseNegatedRight*) {
last_visited_latency_ = kArm64IntegerOpLatency;
}
void SchedulingLatencyVisitorARM64::VisitDataProcWithShifterOp(
- HDataProcWithShifterOp* ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HDataProcWithShifterOp*) {
last_visited_latency_ = kArm64DataProcWithShifterOpLatency;
}
void SchedulingLatencyVisitorARM64::VisitIntermediateAddress(
- HIntermediateAddress* ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HIntermediateAddress*) {
// Although the code generated is a simple `add` instruction, we found through empirical results
// that spacing it from its use in memory accesses was beneficial.
last_visited_latency_ = kArm64IntegerOpLatency + 2;
}
void SchedulingLatencyVisitorARM64::VisitIntermediateAddressIndex(
- HIntermediateAddressIndex* instr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HIntermediateAddressIndex* instr) {
// Although the code generated is a simple `add` instruction, we found through empirical results
// that spacing it from its use in memory accesses was beneficial.
last_visited_latency_ = kArm64DataProcWithShifterOpLatency + 2;
}
-void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate([[maybe_unused]] HMultiplyAccumulate*) {
last_visited_latency_ = kArm64MulIntegerLatency;
}
@@ -65,15 +65,15 @@
last_visited_latency_ = kArm64MemoryLoadLatency;
}
-void SchedulingLatencyVisitorARM64::VisitArrayLength(HArrayLength* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitArrayLength([[maybe_unused]] HArrayLength*) {
last_visited_latency_ = kArm64MemoryLoadLatency;
}
-void SchedulingLatencyVisitorARM64::VisitArraySet(HArraySet* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitArraySet([[maybe_unused]] HArraySet*) {
last_visited_latency_ = kArm64MemoryStoreLatency;
}
-void SchedulingLatencyVisitorARM64::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitBoundsCheck([[maybe_unused]] HBoundsCheck*) {
last_visited_internal_latency_ = kArm64IntegerOpLatency;
// Users do not use any data results.
last_visited_latency_ = 0;
@@ -113,21 +113,21 @@
}
}
-void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet(HInstanceFieldGet* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet([[maybe_unused]] HInstanceFieldGet*) {
last_visited_latency_ = kArm64MemoryLoadLatency;
}
-void SchedulingLatencyVisitorARM64::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitInstanceOf([[maybe_unused]] HInstanceOf*) {
last_visited_internal_latency_ = kArm64CallInternalLatency;
last_visited_latency_ = kArm64IntegerOpLatency;
}
-void SchedulingLatencyVisitorARM64::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitInvoke([[maybe_unused]] HInvoke*) {
last_visited_internal_latency_ = kArm64CallInternalLatency;
last_visited_latency_ = kArm64CallLatency;
}
-void SchedulingLatencyVisitorARM64::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitLoadString([[maybe_unused]] HLoadString*) {
last_visited_internal_latency_ = kArm64LoadStringInternalLatency;
last_visited_latency_ = kArm64MemoryLoadLatency;
}
@@ -138,7 +138,7 @@
: kArm64MulIntegerLatency;
}
-void SchedulingLatencyVisitorARM64::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitNewArray([[maybe_unused]] HNewArray*) {
last_visited_internal_latency_ = kArm64IntegerOpLatency + kArm64CallInternalLatency;
last_visited_latency_ = kArm64CallLatency;
}
@@ -181,7 +181,7 @@
}
}
-void SchedulingLatencyVisitorARM64::VisitStaticFieldGet(HStaticFieldGet* ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitStaticFieldGet([[maybe_unused]] HStaticFieldGet*) {
last_visited_latency_ = kArm64MemoryLoadLatency;
}
@@ -211,7 +211,7 @@
}
void SchedulingLatencyVisitorARM64::VisitVecReplicateScalar(
- HVecReplicateScalar* instr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HVecReplicateScalar* instr) {
last_visited_latency_ = kArm64SIMDReplicateOpLatency;
}
@@ -223,7 +223,7 @@
HandleSimpleArithmeticSIMD(instr);
}
-void SchedulingLatencyVisitorARM64::VisitVecCnv(HVecCnv* instr ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitVecCnv([[maybe_unused]] HVecCnv* instr) {
last_visited_latency_ = kArm64SIMDTypeConversionInt2FPLatency;
}
@@ -279,19 +279,19 @@
HandleSimpleArithmeticSIMD(instr);
}
-void SchedulingLatencyVisitorARM64::VisitVecAnd(HVecAnd* instr ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitVecAnd([[maybe_unused]] HVecAnd* instr) {
last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}
-void SchedulingLatencyVisitorARM64::VisitVecAndNot(HVecAndNot* instr ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitVecAndNot([[maybe_unused]] HVecAndNot* instr) {
last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}
-void SchedulingLatencyVisitorARM64::VisitVecOr(HVecOr* instr ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitVecOr([[maybe_unused]] HVecOr* instr) {
last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}
-void SchedulingLatencyVisitorARM64::VisitVecXor(HVecXor* instr ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::VisitVecXor([[maybe_unused]] HVecXor* instr) {
last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}
@@ -312,13 +312,12 @@
}
void SchedulingLatencyVisitorARM64::VisitVecMultiplyAccumulate(
- HVecMultiplyAccumulate* instr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] HVecMultiplyAccumulate* instr) {
last_visited_latency_ = kArm64SIMDMulIntegerLatency;
}
-void SchedulingLatencyVisitorARM64::HandleVecAddress(
- HVecMemoryOperation* instruction,
- size_t size ATTRIBUTE_UNUSED) {
+void SchedulingLatencyVisitorARM64::HandleVecAddress(HVecMemoryOperation* instruction,
+ [[maybe_unused]] size_t size) {
HInstruction* index = instruction->InputAt(1);
if (!index->IsConstant()) {
last_visited_internal_latency_ += kArm64DataProcWithShifterOpLatency;
diff --git a/compiler/optimizing/scheduler_arm64.h b/compiler/optimizing/scheduler_arm64.h
index ec41577..7ce00e0 100644
--- a/compiler/optimizing/scheduler_arm64.h
+++ b/compiler/optimizing/scheduler_arm64.h
@@ -59,7 +59,7 @@
class SchedulingLatencyVisitorARM64 final : public SchedulingLatencyVisitor {
public:
// Default visitor for instructions not handled specifically below.
- void VisitInstruction(HInstruction* ATTRIBUTE_UNUSED) override {
+ void VisitInstruction([[maybe_unused]] HInstruction*) override {
last_visited_latency_ = kArm64IntegerOpLatency;
}
diff --git a/compiler/utils/arm/assembler_arm_vixl.h b/compiler/utils/arm/assembler_arm_vixl.h
index e4d864b..025bba0 100644
--- a/compiler/utils/arm/assembler_arm_vixl.h
+++ b/compiler/utils/arm/assembler_arm_vixl.h
@@ -220,10 +220,10 @@
// Copy instructions out of assembly buffer into the given region of memory.
void FinalizeInstructions(const MemoryRegion& region) override;
- void Bind(Label* label ATTRIBUTE_UNUSED) override {
+ void Bind([[maybe_unused]] Label* label) override {
UNIMPLEMENTED(FATAL) << "Do not use Bind(Label*) for ARM";
}
- void Jump(Label* label ATTRIBUTE_UNUSED) override {
+ void Jump([[maybe_unused]] Label* label) override {
UNIMPLEMENTED(FATAL) << "Do not use Jump(Label*) for ARM";
}
diff --git a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
index 5487345..7a887fa 100644
--- a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
+++ b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
@@ -344,13 +344,13 @@
}
}
-void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
+void ArmVIXLJNIMacroAssembler::SignExtend([[maybe_unused]] ManagedRegister mreg,
+ [[maybe_unused]] size_t size) {
UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}
-void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
+void ArmVIXLJNIMacroAssembler::ZeroExtend([[maybe_unused]] ManagedRegister mreg,
+ [[maybe_unused]] size_t size) {
UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}
@@ -720,7 +720,7 @@
void ArmVIXLJNIMacroAssembler::Move(ManagedRegister mdst,
ManagedRegister msrc,
- size_t size ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] size_t size) {
ArmManagedRegister dst = mdst.AsArm();
if (kIsDebugBuild) {
// Check that the destination is not a scratch register.
@@ -861,13 +861,13 @@
___ Ldr(reg, MemOperand(reg));
}
-void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
- bool could_be_null ATTRIBUTE_UNUSED) {
+void ArmVIXLJNIMacroAssembler::VerifyObject([[maybe_unused]] ManagedRegister src,
+ [[maybe_unused]] bool could_be_null) {
// TODO: not validating references.
}
-void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED,
- bool could_be_null ATTRIBUTE_UNUSED) {
+void ArmVIXLJNIMacroAssembler::VerifyObject([[maybe_unused]] FrameOffset src,
+ [[maybe_unused]] bool could_be_null) {
// TODO: not validating references.
}
diff --git a/compiler/utils/arm64/assembler_arm64.h b/compiler/utils/arm64/assembler_arm64.h
index f816890..5eff8ca 100644
--- a/compiler/utils/arm64/assembler_arm64.h
+++ b/compiler/utils/arm64/assembler_arm64.h
@@ -145,10 +145,10 @@
// MaybeGenerateMarkingRegisterCheck and is passed to the BRK instruction.
void GenerateMarkingRegisterCheck(vixl::aarch64::Register temp, int code = 0);
- void Bind(Label* label ATTRIBUTE_UNUSED) override {
+ void Bind([[maybe_unused]] Label* label) override {
UNIMPLEMENTED(FATAL) << "Do not use Bind(Label*) for ARM64";
}
- void Jump(Label* label ATTRIBUTE_UNUSED) override {
+ void Jump([[maybe_unused]] Label* label) override {
UNIMPLEMENTED(FATAL) << "Do not use Jump(Label*) for ARM64";
}
diff --git a/compiler/utils/arm64/jni_macro_assembler_arm64.cc b/compiler/utils/arm64/jni_macro_assembler_arm64.cc
index 9e9f122..c538069 100644
--- a/compiler/utils/arm64/jni_macro_assembler_arm64.cc
+++ b/compiler/utils/arm64/jni_macro_assembler_arm64.cc
@@ -705,7 +705,7 @@
}
void Arm64JNIMacroAssembler::TryToTransitionFromRunnableToNative(
- JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs ATTRIBUTE_UNUSED) {
+ JNIMacroLabel* label, [[maybe_unused]] ArrayRef<const ManagedRegister> scratch_regs) {
constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kArm64PointerSize>();
@@ -734,8 +734,8 @@
void Arm64JNIMacroAssembler::TryToTransitionFromNativeToRunnable(
JNIMacroLabel* label,
- ArrayRef<const ManagedRegister> scratch_regs ATTRIBUTE_UNUSED,
- ManagedRegister return_reg ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] ArrayRef<const ManagedRegister> scratch_regs,
+ [[maybe_unused]] ManagedRegister return_reg) {
constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kArm64PointerSize>();
diff --git a/compiler/utils/assembler.h b/compiler/utils/assembler.h
index 13a5d9f..63747be 100644
--- a/compiler/utils/assembler.h
+++ b/compiler/utils/assembler.h
@@ -380,7 +380,7 @@
}
// TODO: Implement with disassembler.
- virtual void Comment(const char* format ATTRIBUTE_UNUSED, ...) {}
+ virtual void Comment([[maybe_unused]] const char* format, ...) {}
virtual void Bind(Label* label) = 0;
virtual void Jump(Label* label) = 0;
diff --git a/compiler/utils/assembler_test.h b/compiler/utils/assembler_test.h
index 4343825..810c843 100644
--- a/compiler/utils/assembler_test.h
+++ b/compiler/utils/assembler_test.h
@@ -803,19 +803,19 @@
}
// Secondary register names are the secondary view on registers, e.g., 32b on 64b systems.
- virtual std::string GetSecondaryRegisterName(const Reg& reg ATTRIBUTE_UNUSED) {
+ virtual std::string GetSecondaryRegisterName([[maybe_unused]] const Reg& reg) {
UNIMPLEMENTED(FATAL) << "Architecture does not support secondary registers";
UNREACHABLE();
}
// Tertiary register names are the tertiary view on registers, e.g., 16b on 64b systems.
- virtual std::string GetTertiaryRegisterName(const Reg& reg ATTRIBUTE_UNUSED) {
+ virtual std::string GetTertiaryRegisterName([[maybe_unused]] const Reg& reg) {
UNIMPLEMENTED(FATAL) << "Architecture does not support tertiary registers";
UNREACHABLE();
}
// Quaternary register names are the quaternary view on registers, e.g., 8b on 64b systems.
- virtual std::string GetQuaternaryRegisterName(const Reg& reg ATTRIBUTE_UNUSED) {
+ virtual std::string GetQuaternaryRegisterName([[maybe_unused]] const Reg& reg) {
UNIMPLEMENTED(FATAL) << "Architecture does not support quaternary registers";
UNREACHABLE();
}
@@ -1576,8 +1576,7 @@
}
// Override this to pad the code with NOPs to a certain size if needed.
- virtual void Pad(std::vector<uint8_t>& data ATTRIBUTE_UNUSED) {
- }
+ virtual void Pad([[maybe_unused]] std::vector<uint8_t>& data) {}
void DriverWrapper(const std::string& assembly_text, const std::string& test_name) {
assembler_->FinalizeCode();
diff --git a/compiler/utils/jni_macro_assembler_test.h b/compiler/utils/jni_macro_assembler_test.h
index ac8e7d3..0d0a992 100644
--- a/compiler/utils/jni_macro_assembler_test.h
+++ b/compiler/utils/jni_macro_assembler_test.h
@@ -77,8 +77,7 @@
private:
// Override this to pad the code with NOPs to a certain size if needed.
- virtual void Pad(std::vector<uint8_t>& data ATTRIBUTE_UNUSED) {
- }
+ virtual void Pad([[maybe_unused]] std::vector<uint8_t>& data) {}
void DriverWrapper(const std::string& assembly_text, const std::string& test_name) {
assembler_->FinalizeCode();
diff --git a/compiler/utils/riscv64/assembler_riscv64.h b/compiler/utils/riscv64/assembler_riscv64.h
index 13e7826..0d78123 100644
--- a/compiler/utils/riscv64/assembler_riscv64.h
+++ b/compiler/utils/riscv64/assembler_riscv64.h
@@ -358,10 +358,10 @@
/////////////////////////////// RV64 MACRO Instructions END ///////////////////////////////
- void Bind(Label* label ATTRIBUTE_UNUSED) override {
+ void Bind([[maybe_unused]] Label* label) override {
UNIMPLEMENTED(FATAL) << "TODO: Support branches.";
}
- void Jump(Label* label ATTRIBUTE_UNUSED) override {
+ void Jump([[maybe_unused]] Label* label) override {
UNIMPLEMENTED(FATAL) << "Do not use Jump for RISCV64";
}
diff --git a/compiler/utils/stack_checks.h b/compiler/utils/stack_checks.h
index d0fff73..1be4532 100644
--- a/compiler/utils/stack_checks.h
+++ b/compiler/utils/stack_checks.h
@@ -35,7 +35,7 @@
// stack overflow check on method entry.
//
// A frame is considered large when it's above kLargeFrameSize.
-static inline bool FrameNeedsStackCheck(size_t size, InstructionSet isa ATTRIBUTE_UNUSED) {
+static inline bool FrameNeedsStackCheck(size_t size, [[maybe_unused]] InstructionSet isa) {
return size >= kLargeFrameSize;
}
diff --git a/compiler/utils/x86/jni_macro_assembler_x86.cc b/compiler/utils/x86/jni_macro_assembler_x86.cc
index 154e50b..dfdbc18 100644
--- a/compiler/utils/x86/jni_macro_assembler_x86.cc
+++ b/compiler/utils/x86/jni_macro_assembler_x86.cc
@@ -83,7 +83,7 @@
void X86JNIMacroAssembler::RemoveFrame(size_t frame_size,
ArrayRef<const ManagedRegister> spill_regs,
- bool may_suspend ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool may_suspend) {
CHECK_ALIGNED(frame_size, kNativeStackAlignment);
cfi().RememberState();
// -kFramePointerSize for ArtMethod*.
diff --git a/compiler/utils/x86_64/assembler_x86_64_test.cc b/compiler/utils/x86_64/assembler_x86_64_test.cc
index a7c206a..5f7845f 100644
--- a/compiler/utils/x86_64/assembler_x86_64_test.cc
+++ b/compiler/utils/x86_64/assembler_x86_64_test.cc
@@ -2135,7 +2135,7 @@
"psrldq $2, %xmm15\n", "psrldqi");
}
-std::string x87_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED,
+std::string x87_fn([[maybe_unused]] AssemblerX86_64Test::Base* assembler_test,
x86_64::X86_64Assembler* assembler) {
std::ostringstream str;
@@ -2202,7 +2202,7 @@
"ret ${imm}", /*non-negative*/ true), "ret");
}
-std::string ret_and_leave_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED,
+std::string ret_and_leave_fn([[maybe_unused]] AssemblerX86_64Test::Base* assembler_test,
x86_64::X86_64Assembler* assembler) {
std::ostringstream str;
@@ -2513,7 +2513,7 @@
return x86_64::X86_64ManagedRegister::FromXmmRegister(r);
}
-std::string buildframe_test_fn(JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED,
+std::string buildframe_test_fn([[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test,
x86_64::X86_64JNIMacroAssembler* assembler) {
// TODO: more interesting spill registers / entry spills.
@@ -2556,7 +2556,7 @@
DriverFn(&buildframe_test_fn, "BuildFrame");
}
-std::string removeframe_test_fn(JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED,
+std::string removeframe_test_fn([[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test,
x86_64::X86_64JNIMacroAssembler* assembler) {
// TODO: more interesting spill registers / entry spills.
@@ -2588,7 +2588,7 @@
}
std::string increaseframe_test_fn(
- JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test,
x86_64::X86_64JNIMacroAssembler* assembler) {
assembler->IncreaseFrameSize(0U);
assembler->IncreaseFrameSize(kStackAlignment);
@@ -2608,7 +2608,7 @@
}
std::string decreaseframe_test_fn(
- JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test,
x86_64::X86_64JNIMacroAssembler* assembler) {
assembler->DecreaseFrameSize(0U);
assembler->DecreaseFrameSize(kStackAlignment);
diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
index 3888457..e9e6dbd 100644
--- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
+++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
@@ -95,7 +95,7 @@
void X86_64JNIMacroAssembler::RemoveFrame(size_t frame_size,
ArrayRef<const ManagedRegister> spill_regs,
- bool may_suspend ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool may_suspend) {
CHECK_ALIGNED(frame_size, kNativeStackAlignment);
cfi().RememberState();
int gpr_count = 0;
@@ -515,7 +515,7 @@
}
void X86_64JNIMacroAssembler::TryToTransitionFromRunnableToNative(
- JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs ATTRIBUTE_UNUSED) {
+ JNIMacroLabel* label, [[maybe_unused]] ArrayRef<const ManagedRegister> scratch_regs) {
constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative);
constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable);
constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kX86_64PointerSize>();
diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc
index db7e55a..be44207 100644
--- a/dex2oat/dex2oat_test.cc
+++ b/dex2oat/dex2oat_test.cc
@@ -207,7 +207,7 @@
// to what's already huge test methods).
class Dex2oatWithExpectedFilterTest : public Dex2oatTest {
protected:
- void CheckFilter(CompilerFilter::Filter expected ATTRIBUTE_UNUSED,
+ void CheckFilter([[maybe_unused]] CompilerFilter::Filter expected,
CompilerFilter::Filter actual) override {
EXPECT_EQ(expected_filter_, actual);
}
@@ -251,7 +251,7 @@
}
}
- virtual void CheckTargetResult(bool expect_use ATTRIBUTE_UNUSED) {
+ virtual void CheckTargetResult([[maybe_unused]] bool expect_use) {
// TODO: Ignore for now, as we won't capture any output (it goes to the logcat). We may do
// something for variants with file descriptor where we can control the lifetime of
// the swap file and thus take a look at it.
@@ -441,8 +441,8 @@
class Dex2oatVeryLargeTest : public Dex2oatTest {
protected:
- void CheckFilter(CompilerFilter::Filter input ATTRIBUTE_UNUSED,
- CompilerFilter::Filter result ATTRIBUTE_UNUSED) override {
+ void CheckFilter([[maybe_unused]] CompilerFilter::Filter input,
+ [[maybe_unused]] CompilerFilter::Filter result) override {
// Ignore, we'll do our own checks.
}
@@ -537,7 +537,7 @@
}
}
- void CheckTargetResult(bool expect_downgrade ATTRIBUTE_UNUSED) {
+ void CheckTargetResult([[maybe_unused]] bool expect_downgrade) {
// TODO: Ignore for now. May do something for fd things.
}
@@ -591,8 +591,8 @@
class Dex2oatLayoutTest : public Dex2oatTest {
protected:
- void CheckFilter(CompilerFilter::Filter input ATTRIBUTE_UNUSED,
- CompilerFilter::Filter result ATTRIBUTE_UNUSED) override {
+ void CheckFilter([[maybe_unused]] CompilerFilter::Filter input,
+ [[maybe_unused]] CompilerFilter::Filter result) override {
// Ignore, we'll do our own checks.
}
diff --git a/dex2oat/driver/compiler_driver.cc b/dex2oat/driver/compiler_driver.cc
index df7835d..a963fcf 100644
--- a/dex2oat/driver/compiler_driver.cc
+++ b/dex2oat/driver/compiler_driver.cc
@@ -274,12 +274,12 @@
}
CompilerDriver::~CompilerDriver() {
- compiled_methods_.Visit([this](const DexFileReference& ref ATTRIBUTE_UNUSED,
- CompiledMethod* method) {
- if (method != nullptr) {
- CompiledMethod::ReleaseSwapAllocatedCompiledMethod(GetCompiledMethodStorage(), method);
- }
- });
+ compiled_methods_.Visit(
+ [this]([[maybe_unused]] const DexFileReference& ref, CompiledMethod* method) {
+ if (method != nullptr) {
+ CompiledMethod::ReleaseSwapAllocatedCompiledMethod(GetCompiledMethodStorage(), method);
+ }
+ });
}
@@ -459,17 +459,16 @@
const DexFile& dex_file,
Handle<mirror::DexCache> dex_cache,
ProfileCompilationInfo::ProfileIndexType profile_index) {
- auto quick_fn = [profile_index](
- Thread* self ATTRIBUTE_UNUSED,
- CompilerDriver* driver,
- const dex::CodeItem* code_item,
- uint32_t access_flags,
- InvokeType invoke_type,
- uint16_t class_def_idx,
- uint32_t method_idx,
- Handle<mirror::ClassLoader> class_loader,
- const DexFile& dex_file,
- Handle<mirror::DexCache> dex_cache) {
+ auto quick_fn = [profile_index]([[maybe_unused]] Thread* self,
+ CompilerDriver* driver,
+ const dex::CodeItem* code_item,
+ uint32_t access_flags,
+ InvokeType invoke_type,
+ uint16_t class_def_idx,
+ uint32_t method_idx,
+ Handle<mirror::ClassLoader> class_loader,
+ const DexFile& dex_file,
+ Handle<mirror::DexCache> dex_cache) {
DCHECK(driver != nullptr);
const VerificationResults* results = driver->GetVerificationResults();
DCHECK(results != nullptr);
@@ -761,7 +760,7 @@
}
}
-void CompilerDriver::PrepareDexFilesForOatFile(TimingLogger* timings ATTRIBUTE_UNUSED) {
+void CompilerDriver::PrepareDexFilesForOatFile([[maybe_unused]] TimingLogger* timings) {
compiled_classes_.AddDexFiles(GetCompilerOptions().GetDexFilesForOatFile());
}
@@ -1231,8 +1230,7 @@
// Visitor for VisitReferences.
void operator()(ObjPtr<mirror::Object> object,
MemberOffset field_offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset);
if (ref != nullptr) {
VisitClinitClassesObject(ref);
@@ -1240,13 +1238,13 @@
}
// java.lang.ref.Reference visitor for VisitReferences.
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const {}
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
+ [[maybe_unused]] ObjPtr<mirror::Reference> ref) const {}
// Ignore class native roots.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
void Walk() REQUIRES_SHARED(Locks::mutator_lock_) {
// Find all the already-marked classes.
diff --git a/dex2oat/linker/code_info_table_deduper.cc b/dex2oat/linker/code_info_table_deduper.cc
index eff0292..e50f2a1 100644
--- a/dex2oat/linker/code_info_table_deduper.cc
+++ b/dex2oat/linker/code_info_table_deduper.cc
@@ -79,7 +79,7 @@
// Insert entries for large tables to the `dedupe_set_` and check for duplicates.
std::array<DedupeSetEntry*, kNumBitTables> dedupe_entries;
std::fill(dedupe_entries.begin(), dedupe_entries.end(), nullptr);
- CodeInfo::ForEachBitTableField([&](size_t i, auto member_pointer ATTRIBUTE_UNUSED) {
+ CodeInfo::ForEachBitTableField([&](size_t i, [[maybe_unused]] auto member_pointer) {
if (LIKELY(code_info.HasBitTable(i))) {
uint32_t table_bit_size = bit_table_bit_starts[i + 1u] - bit_table_bit_starts[i];
if (table_bit_size >= kMinDedupSize) {
@@ -109,7 +109,7 @@
});
writer_.WriteInterleavedVarints(header);
// Write bit tables and update offsets in `dedupe_set_` after encoding the `header`.
- CodeInfo::ForEachBitTableField([&](size_t i, auto member_pointer ATTRIBUTE_UNUSED) {
+ CodeInfo::ForEachBitTableField([&](size_t i, [[maybe_unused]] auto member_pointer) {
if (code_info.HasBitTable(i)) {
size_t current_bit_offset = writer_.NumberOfWrittenBits();
if (code_info.IsBitTableDeduped(i)) {
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index 7e30541..b464f0a 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -705,16 +705,15 @@
: image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {}
ALWAYS_INLINE void VisitRootIfNonNull(
- mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) { }
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {}
- ALWAYS_INLINE void VisitRoot(
- mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) { }
+ ALWAYS_INLINE void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root)
+ const REQUIRES_SHARED(Locks::mutator_lock_) {}
- ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
- MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj,
+ MemberOffset offset,
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) {
mirror::Object* ref =
obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
@@ -747,8 +746,8 @@
}
}
- ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref) const
+ ALWAYS_INLINE void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
}
@@ -1581,10 +1580,9 @@
}
// Collects info for managed fields that reference managed Strings.
- void operator() (ObjPtr<mirror::Object> obj,
- MemberOffset member_offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ void operator()(ObjPtr<mirror::Object> obj,
+ MemberOffset member_offset,
+ [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::Object> referred_obj =
obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(member_offset);
@@ -1595,8 +1593,7 @@
}
ALWAYS_INLINE
- void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref) const
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
}
@@ -1614,25 +1611,25 @@
: helper_(helper), oat_index_(oat_index) {}
// We do not visit native roots. These are handled with other logic.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
LOG(FATAL) << "UNREACHABLE";
}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
LOG(FATAL) << "UNREACHABLE";
}
ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) {
mirror::Object* ref =
obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
VisitReference(ref);
}
- ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref) const
+ ALWAYS_INLINE void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
}
@@ -2759,17 +2756,17 @@
explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
}
- void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED,
- const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void VisitRoots([[maybe_unused]] mirror::Object*** roots,
+ [[maybe_unused]] size_t count,
+ [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
LOG(FATAL) << "Unsupported";
}
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
for (size_t i = 0; i < count; ++i) {
// Copy the reference. Since we do not have the address for recording the relocation,
// it needs to be recorded explicitly by the user of FixupRootVisitor.
@@ -3034,15 +3031,15 @@
}
// We do not visit native roots. These are handled with other logic.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
LOG(FATAL) << "UNREACHABLE";
}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
LOG(FATAL) << "UNREACHABLE";
}
- void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, MemberOffset offset, [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone, kWithoutReadBarrier>(offset);
// Copy the reference and record the fixup if necessary.
@@ -3051,8 +3048,7 @@
}
// java.lang.ref.Reference visitor.
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref) const
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
}
@@ -3122,14 +3118,14 @@
FixupClassVisitor(ImageWriter* image_writer, Object* copy)
: FixupVisitor(image_writer, copy) {}
- void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, MemberOffset offset, [[maybe_unused]] bool is_static) const
REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
DCHECK(obj->IsClass());
FixupVisitor::operator()(obj, offset, /*is_static*/false);
}
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
+ [[maybe_unused]] ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
LOG(FATAL) << "Reference not expected here.";
}
diff --git a/dex2oat/linker/multi_oat_relative_patcher_test.cc b/dex2oat/linker/multi_oat_relative_patcher_test.cc
index a393eb8..c3133b6 100644
--- a/dex2oat/linker/multi_oat_relative_patcher_test.cc
+++ b/dex2oat/linker/multi_oat_relative_patcher_test.cc
@@ -34,7 +34,7 @@
MockPatcher() { }
uint32_t ReserveSpace(uint32_t offset,
- const CompiledMethod* compiled_method ATTRIBUTE_UNUSED,
+ [[maybe_unused]] const CompiledMethod* compiled_method,
MethodReference method_ref) override {
last_reserve_offset_ = offset;
last_reserve_method_ = method_ref;
@@ -76,7 +76,7 @@
return offset;
}
- void PatchCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
+ void PatchCall([[maybe_unused]] std::vector<uint8_t>* code,
uint32_t literal_offset,
uint32_t patch_offset,
uint32_t target_offset) override {
@@ -85,7 +85,7 @@
last_target_offset_ = target_offset;
}
- void PatchPcRelativeReference(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
+ void PatchPcRelativeReference([[maybe_unused]] std::vector<uint8_t>* code,
const LinkerPatch& patch,
uint32_t patch_offset,
uint32_t target_offset) override {
@@ -94,20 +94,20 @@
last_target_offset_ = target_offset;
}
- void PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) override {
+ void PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) override {
LOG(FATAL) << "UNIMPLEMENTED";
}
- void PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) override {
+ void PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) override {
LOG(FATAL) << "UNIMPLEMENTED";
}
std::vector<debug::MethodDebugInfo> GenerateThunkDebugInfo(
- uint32_t executable_offset ATTRIBUTE_UNUSED) override {
+ [[maybe_unused]] uint32_t executable_offset) override {
LOG(FATAL) << "UNIMPLEMENTED";
UNREACHABLE();
}
diff --git a/dex2oat/linker/oat_writer.cc b/dex2oat/linker/oat_writer.cc
index 222a5f4..413d71f 100644
--- a/dex2oat/linker/oat_writer.cc
+++ b/dex2oat/linker/oat_writer.cc
@@ -765,7 +765,7 @@
explicit InitBssLayoutMethodVisitor(OatWriter* writer)
: DexMethodVisitor(writer, /* offset */ 0u) {}
- bool VisitMethod(size_t class_def_method_index ATTRIBUTE_UNUSED,
+ bool VisitMethod([[maybe_unused]] size_t class_def_method_index,
const ClassAccessor::Method& method) override {
// Look for patches with .bss references and prepare maps with placeholders for their offsets.
CompiledMethod* compiled_method = writer_->compiler_driver_->GetCompiledMethod(
@@ -859,7 +859,7 @@
return true;
}
- bool VisitMethod(size_t class_def_method_index ATTRIBUTE_UNUSED,
+ bool VisitMethod([[maybe_unused]] size_t class_def_method_index,
const ClassAccessor::Method& method) override {
// Fill in the compiled_methods_ array for methods that have a
// CompiledMethod. We track the number of non-null entries in
@@ -1396,8 +1396,8 @@
}
bool VisitMethod(size_t class_def_method_index,
- const ClassAccessor::Method& method ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] const ClassAccessor::Method& method) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
OatClass* oat_class = &writer_->oat_classes_[oat_class_index_];
CompiledMethod* compiled_method = oat_class->GetCompiledMethod(class_def_method_index);
@@ -2504,7 +2504,7 @@
return true;
}
-void OatWriter::WriteQuickeningInfo(/*out*/std::vector<uint8_t>* ATTRIBUTE_UNUSED) {
+void OatWriter::WriteQuickeningInfo([[maybe_unused]] /*out*/ std::vector<uint8_t>*) {
// Nothing to write. Leave `vdex_size_` untouched and unaligned.
vdex_quickening_info_offset_ = vdex_size_;
size_quickening_info_alignment_ = 0;
diff --git a/dex2oat/linker/relative_patcher.cc b/dex2oat/linker/relative_patcher.cc
index 40acb0b..1c04812 100644
--- a/dex2oat/linker/relative_patcher.cc
+++ b/dex2oat/linker/relative_patcher.cc
@@ -44,8 +44,8 @@
RelativePatcherNone() { }
uint32_t ReserveSpace(uint32_t offset,
- const CompiledMethod* compiled_method ATTRIBUTE_UNUSED,
- MethodReference method_ref ATTRIBUTE_UNUSED) override {
+ [[maybe_unused]] const CompiledMethod* compiled_method,
+ [[maybe_unused]] MethodReference method_ref) override {
return offset; // No space reserved; no patches expected.
}
@@ -53,38 +53,38 @@
return offset; // No space reserved; no patches expected.
}
- uint32_t WriteThunks(OutputStream* out ATTRIBUTE_UNUSED, uint32_t offset) override {
+ uint32_t WriteThunks([[maybe_unused]] OutputStream* out, uint32_t offset) override {
return offset; // No thunks added; no patches expected.
}
- void PatchCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- uint32_t literal_offset ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED,
- uint32_t target_offset ATTRIBUTE_UNUSED) override {
+ void PatchCall([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] uint32_t literal_offset,
+ [[maybe_unused]] uint32_t patch_offset,
+ [[maybe_unused]] uint32_t target_offset) override {
LOG(FATAL) << "Unexpected relative call patch.";
}
- void PatchPcRelativeReference(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED,
- uint32_t target_offset ATTRIBUTE_UNUSED) override {
+ void PatchPcRelativeReference([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset,
+ [[maybe_unused]] uint32_t target_offset) override {
LOG(FATAL) << "Unexpected relative dex cache array patch.";
}
- void PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) override {
+ void PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) override {
LOG(FATAL) << "Unexpected entrypoint call patch.";
}
- void PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) override {
+ void PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) override {
LOG(FATAL) << "Unexpected baker read barrier branch patch.";
}
std::vector<debug::MethodDebugInfo> GenerateThunkDebugInfo(
- uint32_t executable_offset ATTRIBUTE_UNUSED) override {
+ [[maybe_unused]] uint32_t executable_offset) override {
return std::vector<debug::MethodDebugInfo>(); // No thunks added.
}
diff --git a/dex2oat/linker/x86/relative_patcher_x86.cc b/dex2oat/linker/x86/relative_patcher_x86.cc
index a444446..5b8cf47 100644
--- a/dex2oat/linker/x86/relative_patcher_x86.cc
+++ b/dex2oat/linker/x86/relative_patcher_x86.cc
@@ -56,15 +56,15 @@
(*code)[literal_offset + 3u] = static_cast<uint8_t>(diff >> 24);
}
-void X86RelativePatcher::PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) {
+void X86RelativePatcher::PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) {
LOG(FATAL) << "UNIMPLEMENTED";
}
-void X86RelativePatcher::PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) {
+void X86RelativePatcher::PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) {
LOG(FATAL) << "UNIMPLEMENTED";
}
diff --git a/dex2oat/linker/x86/relative_patcher_x86_base.cc b/dex2oat/linker/x86/relative_patcher_x86_base.cc
index 07cd724..1104b8a 100644
--- a/dex2oat/linker/x86/relative_patcher_x86_base.cc
+++ b/dex2oat/linker/x86/relative_patcher_x86_base.cc
@@ -23,8 +23,8 @@
uint32_t X86BaseRelativePatcher::ReserveSpace(
uint32_t offset,
- const CompiledMethod* compiled_method ATTRIBUTE_UNUSED,
- MethodReference method_ref ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const CompiledMethod* compiled_method,
+ [[maybe_unused]] MethodReference method_ref) {
return offset; // No space reserved; no limit on relative call distance.
}
@@ -32,12 +32,12 @@
return offset; // No space reserved; no limit on relative call distance.
}
-uint32_t X86BaseRelativePatcher::WriteThunks(OutputStream* out ATTRIBUTE_UNUSED, uint32_t offset) {
+uint32_t X86BaseRelativePatcher::WriteThunks([[maybe_unused]] OutputStream* out, uint32_t offset) {
return offset; // No thunks added; no limit on relative call distance.
}
std::vector<debug::MethodDebugInfo> X86BaseRelativePatcher::GenerateThunkDebugInfo(
- uint32_t executable_offset ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] uint32_t executable_offset) {
return std::vector<debug::MethodDebugInfo>(); // No thunks added.
}
diff --git a/dex2oat/linker/x86_64/relative_patcher_x86_64.cc b/dex2oat/linker/x86_64/relative_patcher_x86_64.cc
index 629affc..1177417 100644
--- a/dex2oat/linker/x86_64/relative_patcher_x86_64.cc
+++ b/dex2oat/linker/x86_64/relative_patcher_x86_64.cc
@@ -34,15 +34,15 @@
reinterpret_cast<unaligned_int32_t*>(&(*code)[patch.LiteralOffset()])[0] = displacement;
}
-void X86_64RelativePatcher::PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) {
+void X86_64RelativePatcher::PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) {
LOG(FATAL) << "UNIMPLEMENTED";
}
-void X86_64RelativePatcher::PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED,
- const LinkerPatch& patch ATTRIBUTE_UNUSED,
- uint32_t patch_offset ATTRIBUTE_UNUSED) {
+void X86_64RelativePatcher::PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code,
+ [[maybe_unused]] const LinkerPatch& patch,
+ [[maybe_unused]] uint32_t patch_offset) {
LOG(FATAL) << "UNIMPLEMENTED";
}
diff --git a/dex2oat/utils/swap_space.h b/dex2oat/utils/swap_space.h
index aba6485..e4895ac 100644
--- a/dex2oat/utils/swap_space.h
+++ b/dex2oat/utils/swap_space.h
@@ -176,7 +176,7 @@
pointer address(reference x) const { return &x; }
const_pointer address(const_reference x) const { return &x; }
- pointer allocate(size_type n, SwapAllocator<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
+ pointer allocate(size_type n, [[maybe_unused]] SwapAllocator<void>::pointer hint = nullptr) {
DCHECK_LE(n, max_size());
if (swap_space_ == nullptr) {
T* result = reinterpret_cast<T*>(malloc(n * sizeof(T)));
diff --git a/dex2oat/verifier_deps_test.cc b/dex2oat/verifier_deps_test.cc
index bb61200..0b72382 100644
--- a/dex2oat/verifier_deps_test.cc
+++ b/dex2oat/verifier_deps_test.cc
@@ -46,9 +46,9 @@
: CompilerCallbacks(CompilerCallbacks::CallbackMode::kCompileApp),
deps_(nullptr) {}
- void AddUncompilableMethod(MethodReference ref ATTRIBUTE_UNUSED) override {}
- void AddUncompilableClass(ClassReference ref ATTRIBUTE_UNUSED) override {}
- void ClassRejected(ClassReference ref ATTRIBUTE_UNUSED) override {}
+ void AddUncompilableMethod([[maybe_unused]] MethodReference ref) override {}
+ void AddUncompilableClass([[maybe_unused]] ClassReference ref) override {}
+ void ClassRejected([[maybe_unused]] ClassReference ref) override {}
verifier::VerifierDeps* GetVerifierDeps() const override { return deps_; }
void SetVerifierDeps(verifier::VerifierDeps* deps) override { deps_ = deps; }
diff --git a/dexlayout/dex_ir.cc b/dexlayout/dex_ir.cc
index 3917847..82c0389 100644
--- a/dexlayout/dex_ir.cc
+++ b/dexlayout/dex_ir.cc
@@ -30,11 +30,9 @@
namespace art {
namespace dex_ir {
-static uint32_t HeaderOffset(const dex_ir::Header* header ATTRIBUTE_UNUSED) {
- return 0;
-}
+static uint32_t HeaderOffset([[maybe_unused]] const dex_ir::Header* header) { return 0; }
-static uint32_t HeaderSize(const dex_ir::Header* header ATTRIBUTE_UNUSED) {
+static uint32_t HeaderSize([[maybe_unused]] const dex_ir::Header* header) {
// Size is in elements, so there is only one header.
return 1;
}
diff --git a/dexlist/dexlist.cc b/dexlist/dexlist.cc
index 3603675..553e364 100644
--- a/dexlist/dexlist.cc
+++ b/dexlist/dexlist.cc
@@ -84,8 +84,11 @@
* Dumps a method.
*/
static void dumpMethod(const DexFile* pDexFile,
- const char* fileName, u4 idx, u4 flags ATTRIBUTE_UNUSED,
- const dex::CodeItem* pCode, u4 codeOffset) {
+ const char* fileName,
+ u4 idx,
+ [[maybe_unused]] u4 flags,
+ const dex::CodeItem* pCode,
+ u4 codeOffset) {
// Abstract and native methods don't get listed.
if (pCode == nullptr || codeOffset == 0) {
return;
diff --git a/dt_fd_forward/dt_fd_forward.cc b/dt_fd_forward/dt_fd_forward.cc
index 0ff8770..b46810f 100644
--- a/dt_fd_forward/dt_fd_forward.cc
+++ b/dt_fd_forward/dt_fd_forward.cc
@@ -691,7 +691,7 @@
class JdwpTransportFunctions {
public:
- static jdwpTransportError GetCapabilities(jdwpTransportEnv* env ATTRIBUTE_UNUSED,
+ static jdwpTransportError GetCapabilities([[maybe_unused]] jdwpTransportEnv* env,
/*out*/ JDWPTransportCapabilities* capabilities_ptr) {
// We don't support any of the optional capabilities (can_timeout_attach, can_timeout_accept,
// can_timeout_handshake) so just return a zeroed capabilities ptr.
@@ -703,8 +703,8 @@
// Address is <sock_fd>
static jdwpTransportError Attach(jdwpTransportEnv* env,
const char* address,
- jlong attach_timeout ATTRIBUTE_UNUSED,
- jlong handshake_timeout ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jlong attach_timeout,
+ [[maybe_unused]] jlong handshake_timeout) {
if (address == nullptr || *address == '\0') {
return ERR(ILLEGAL_ARGUMENT);
}
@@ -743,8 +743,8 @@
}
static jdwpTransportError Accept(jdwpTransportEnv* env,
- jlong accept_timeout ATTRIBUTE_UNUSED,
- jlong handshake_timeout ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jlong accept_timeout,
+ [[maybe_unused]] jlong handshake_timeout) {
return AsFdForward(env)->Accept();
}
@@ -784,11 +784,10 @@
JdwpTransportFunctions::GetLastError,
};
-extern "C"
-JNIEXPORT jint JNICALL jdwpTransport_OnLoad(JavaVM* vm ATTRIBUTE_UNUSED,
- jdwpTransportCallback* cb,
- jint version,
- jdwpTransportEnv** /*out*/env) {
+extern "C" JNIEXPORT jint JNICALL jdwpTransport_OnLoad([[maybe_unused]] JavaVM* vm,
+ jdwpTransportCallback* cb,
+ jint version,
+ jdwpTransportEnv** /*out*/ env) {
if (version != JDWPTRANSPORT_VERSION_1_0) {
LOG(ERROR) << "unknown version " << version;
return JNI_EVERSION;
diff --git a/imgdiag/imgdiag.cc b/imgdiag/imgdiag.cc
index e3310e9..26f8a00 100644
--- a/imgdiag/imgdiag.cc
+++ b/imgdiag/imgdiag.cc
@@ -728,7 +728,7 @@
ArrayRef<uint8_t> zygote_contents,
const android::procinfo::MapInfo& boot_map,
const ImageHeader& image_header,
- bool dump_dirty_objects ATTRIBUTE_UNUSED)
+ [[maybe_unused]] bool dump_dirty_objects)
: RegionCommon<ArtMethod>(os, remote_contents, zygote_contents, boot_map, image_header),
os_(*os) {
// Prepare the table for offset to member lookups.
@@ -749,12 +749,9 @@
RegionCommon<ArtMethod>::image_header_.VisitPackedArtMethods(*visitor, base, pointer_size);
}
- void VisitEntry(ArtMethod* method ATTRIBUTE_UNUSED)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- }
+ void VisitEntry([[maybe_unused]] ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {}
- void AddCleanEntry(ArtMethod* method ATTRIBUTE_UNUSED) {
- }
+ void AddCleanEntry([[maybe_unused]] ArtMethod* method) {}
void AddFalseDirtyEntry(ArtMethod* method)
REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -781,8 +778,8 @@
void DiffEntryContents(ArtMethod* method,
uint8_t* remote_bytes,
const uint8_t* base_ptr,
- bool log_dirty_objects ATTRIBUTE_UNUSED,
- size_t entry_offset ATTRIBUTE_UNUSED)
+ [[maybe_unused]] bool log_dirty_objects,
+ [[maybe_unused]] size_t entry_offset)
REQUIRES_SHARED(Locks::mutator_lock_) {
const char* tabs = " ";
os_ << tabs << "ArtMethod " << ArtMethod::PrettyMethod(method) << "\n";
diff --git a/libartbase/base/allocator.cc b/libartbase/base/allocator.cc
index 6393672..71a4f6c 100644
--- a/libartbase/base/allocator.cc
+++ b/libartbase/base/allocator.cc
@@ -49,12 +49,12 @@
NoopAllocator() {}
~NoopAllocator() {}
- void* Alloc(size_t size ATTRIBUTE_UNUSED) override {
+ void* Alloc([[maybe_unused]] size_t size) override {
LOG(FATAL) << "NoopAllocator::Alloc should not be called";
UNREACHABLE();
}
- void Free(void* p ATTRIBUTE_UNUSED) override {
+ void Free([[maybe_unused]] void* p) override {
// Noop.
}
diff --git a/libartbase/base/allocator.h b/libartbase/base/allocator.h
index 81f3a60..24374a2 100644
--- a/libartbase/base/allocator.h
+++ b/libartbase/base/allocator.h
@@ -115,8 +115,8 @@
// Used internally by STL data structures.
template <class U>
- TrackingAllocatorImpl(
- const TrackingAllocatorImpl<U, kTag>& alloc ATTRIBUTE_UNUSED) noexcept {}
+ explicit TrackingAllocatorImpl(
+ [[maybe_unused]] const TrackingAllocatorImpl<U, kTag>& alloc) noexcept {}
// Used internally by STL data structures.
TrackingAllocatorImpl() noexcept {
@@ -130,7 +130,7 @@
using other = TrackingAllocatorImpl<U, kTag>;
};
- pointer allocate(size_type n, const_pointer hint ATTRIBUTE_UNUSED = 0) {
+ pointer allocate(size_type n, [[maybe_unused]] const_pointer hint = 0) {
const size_t size = n * sizeof(T);
TrackedAllocators::RegisterAllocation(GetTag(), size);
return reinterpret_cast<pointer>(malloc(size));
diff --git a/libartbase/base/arena_allocator.h b/libartbase/base/arena_allocator.h
index c4f713a..10f7f31 100644
--- a/libartbase/base/arena_allocator.h
+++ b/libartbase/base/arena_allocator.h
@@ -120,13 +120,13 @@
ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
- void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
- void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
+ void Copy([[maybe_unused]] const ArenaAllocatorStatsImpl& other) {}
+ void RecordAlloc([[maybe_unused]] size_t bytes, [[maybe_unused]] ArenaAllocKind kind) {}
size_t NumAllocations() const { return 0u; }
size_t BytesAllocated() const { return 0u; }
- void Dump(std::ostream& os ATTRIBUTE_UNUSED,
- const Arena* first ATTRIBUTE_UNUSED,
- ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
+ void Dump([[maybe_unused]] std::ostream& os,
+ [[maybe_unused]] const Arena* first,
+ [[maybe_unused]] ssize_t lost_bytes_adjustment) const {}
};
template <bool kCount>
diff --git a/libartbase/base/arena_bit_vector.cc b/libartbase/base/arena_bit_vector.cc
index 138a5df..e7acb60 100644
--- a/libartbase/base/arena_bit_vector.cc
+++ b/libartbase/base/arena_bit_vector.cc
@@ -28,7 +28,7 @@
class ArenaBitVectorAllocatorKindImpl<false> {
public:
// Not tracking allocations, ignore the supplied kind and arbitrarily provide kArenaAllocSTL.
- explicit ArenaBitVectorAllocatorKindImpl(ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
+ explicit ArenaBitVectorAllocatorKindImpl([[maybe_unused]] ArenaAllocKind kind) {}
ArenaBitVectorAllocatorKindImpl(const ArenaBitVectorAllocatorKindImpl&) = default;
ArenaBitVectorAllocatorKindImpl& operator=(const ArenaBitVectorAllocatorKindImpl&) = default;
ArenaAllocKind Kind() { return kArenaAllocGrowableBitMap; }
diff --git a/libartbase/base/arena_containers.h b/libartbase/base/arena_containers.h
index f205bc4..db1d040 100644
--- a/libartbase/base/arena_containers.h
+++ b/libartbase/base/arena_containers.h
@@ -109,7 +109,7 @@
class ArenaAllocatorAdapterKindImpl<false> {
public:
// Not tracking allocations, ignore the supplied kind and arbitrarily provide kArenaAllocSTL.
- explicit ArenaAllocatorAdapterKindImpl(ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
+ explicit ArenaAllocatorAdapterKindImpl([[maybe_unused]] ArenaAllocKind kind) {}
ArenaAllocatorAdapterKindImpl(const ArenaAllocatorAdapterKindImpl&) = default;
ArenaAllocatorAdapterKindImpl& operator=(const ArenaAllocatorAdapterKindImpl&) = default;
ArenaAllocKind Kind() { return kArenaAllocSTL; }
@@ -199,7 +199,7 @@
const_pointer address(const_reference x) const { return &x; }
pointer allocate(size_type n,
- ArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
+ [[maybe_unused]] ArenaAllocatorAdapter<void>::pointer hint = nullptr) {
DCHECK_LE(n, max_size());
return allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind());
}
diff --git a/libartbase/base/bit_memory_region.h b/libartbase/base/bit_memory_region.h
index baac2f5..f3dbd63 100644
--- a/libartbase/base/bit_memory_region.h
+++ b/libartbase/base/bit_memory_region.h
@@ -157,11 +157,11 @@
ALWAYS_INLINE void CopyBits(const BitMemoryRegion& src) {
DCHECK_EQ(size_in_bits(), src.size_in_bits());
// Hopefully, the loads of the unused `value` shall be optimized away.
- VisitChunks(
- [this, &src](size_t offset, size_t num_bits, size_t value ATTRIBUTE_UNUSED) ALWAYS_INLINE {
- StoreChunk(offset, src.LoadBits(offset, num_bits), num_bits);
- return true;
- });
+ VisitChunks([this, &src](size_t offset, size_t num_bits, [[maybe_unused]] size_t value)
+ ALWAYS_INLINE {
+ StoreChunk(offset, src.LoadBits(offset, num_bits), num_bits);
+ return true;
+ });
}
// And bits from other bit region.
@@ -194,9 +194,8 @@
// Count the number of set bits within this region.
ALWAYS_INLINE size_t PopCount() const {
size_t result = 0u;
- VisitChunks([&](size_t offset ATTRIBUTE_UNUSED,
- size_t num_bits ATTRIBUTE_UNUSED,
- size_t value) ALWAYS_INLINE {
+ VisitChunks([&]([[maybe_unused]] size_t offset, [[maybe_unused]] size_t num_bits, size_t value)
+ ALWAYS_INLINE {
result += POPCOUNT(value);
return true;
});
@@ -210,11 +209,9 @@
// Check if this region has all bits clear.
ALWAYS_INLINE bool HasAllBitsClear() const {
- return VisitChunks([](size_t offset ATTRIBUTE_UNUSED,
- size_t num_bits ATTRIBUTE_UNUSED,
- size_t value) ALWAYS_INLINE {
- return value == 0u;
- });
+ return VisitChunks(
+ []([[maybe_unused]] size_t offset, [[maybe_unused]] size_t num_bits, size_t value)
+ ALWAYS_INLINE { return value == 0u; });
}
// Check if this region has any bit set.
diff --git a/libartbase/base/debug_stack.h b/libartbase/base/debug_stack.h
index 4bbaee8..f3ee310 100644
--- a/libartbase/base/debug_stack.h
+++ b/libartbase/base/debug_stack.h
@@ -55,7 +55,7 @@
template <>
class DebugStackReferenceImpl<false> {
public:
- explicit DebugStackReferenceImpl(DebugStackRefCounterImpl<false>* counter ATTRIBUTE_UNUSED) {}
+ explicit DebugStackReferenceImpl([[maybe_unused]] DebugStackRefCounterImpl<false>* counter) {}
DebugStackReferenceImpl(const DebugStackReferenceImpl& other) = default;
DebugStackReferenceImpl& operator=(const DebugStackReferenceImpl& other) = default;
void CheckTop() { }
@@ -64,7 +64,7 @@
template <>
class DebugStackIndirectTopRefImpl<false> {
public:
- explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<false>* ref ATTRIBUTE_UNUSED) {}
+ explicit DebugStackIndirectTopRefImpl([[maybe_unused]] DebugStackReferenceImpl<false>* ref) {}
DebugStackIndirectTopRefImpl(const DebugStackIndirectTopRefImpl& other) = default;
DebugStackIndirectTopRefImpl& operator=(const DebugStackIndirectTopRefImpl& other) = default;
void CheckTop() { }
diff --git a/libartbase/base/hash_set.h b/libartbase/base/hash_set.h
index 3f3c8f2..fec9440 100644
--- a/libartbase/base/hash_set.h
+++ b/libartbase/base/hash_set.h
@@ -502,10 +502,10 @@
// Insert an element with hint.
// Note: The hint is not very useful for a HashSet<> unless there are many hash conflicts
// and in that case the use of HashSet<> itself should be reconsidered.
- std::pair<iterator, bool> insert(const_iterator hint ATTRIBUTE_UNUSED, const T& element) {
+ std::pair<iterator, bool> insert([[maybe_unused]] const_iterator hint, const T& element) {
return insert(element);
}
- std::pair<iterator, bool> insert(const_iterator hint ATTRIBUTE_UNUSED, T&& element) {
+ std::pair<iterator, bool> insert([[maybe_unused]] const_iterator hint, T&& element) {
return insert(std::move(element));
}
@@ -710,7 +710,7 @@
if (UNLIKELY(NumBuckets() == 0)) {
return 0;
}
- auto fail_fn = [&](size_t index ATTRIBUTE_UNUSED) ALWAYS_INLINE { return NumBuckets(); };
+ auto fail_fn = [&]([[maybe_unused]] size_t index) ALWAYS_INLINE { return NumBuckets(); };
return FindIndexImpl(element, hash, fail_fn);
}
diff --git a/libartbase/base/intrusive_forward_list.h b/libartbase/base/intrusive_forward_list.h
index 2e66f3e..06dd407 100644
--- a/libartbase/base/intrusive_forward_list.h
+++ b/libartbase/base/intrusive_forward_list.h
@@ -35,9 +35,9 @@
explicit IntrusiveForwardListHook(const IntrusiveForwardListHook* hook) : next_hook(hook) { }
// Allow copyable values but do not copy the hook, it is not part of the value.
- IntrusiveForwardListHook(const IntrusiveForwardListHook& other ATTRIBUTE_UNUSED)
- : next_hook(nullptr) { }
- IntrusiveForwardListHook& operator=(const IntrusiveForwardListHook& src ATTRIBUTE_UNUSED) {
+ explicit IntrusiveForwardListHook([[maybe_unused]] const IntrusiveForwardListHook& other)
+ : next_hook(nullptr) {}
+ IntrusiveForwardListHook& operator=([[maybe_unused]] const IntrusiveForwardListHook& src) {
return *this;
}
diff --git a/libartbase/base/intrusive_forward_list_test.cc b/libartbase/base/intrusive_forward_list_test.cc
index 595210b..180c3d6 100644
--- a/libartbase/base/intrusive_forward_list_test.cc
+++ b/libartbase/base/intrusive_forward_list_test.cc
@@ -578,7 +578,7 @@
ref.remove_if(odd);
ifl.remove_if(odd);
ASSERT_LISTS_EQUAL(ref, ifl);
- auto all = [](ValueType value ATTRIBUTE_UNUSED) { return true; };
+ auto all = []([[maybe_unused]] ValueType value) { return true; };
ref.remove_if(all);
ifl.remove_if(all);
ASSERT_LISTS_EQUAL(ref, ifl);
diff --git a/libartbase/base/membarrier.cc b/libartbase/base/membarrier.cc
index 48f47df..243b908 100644
--- a/libartbase/base/membarrier.cc
+++ b/libartbase/base/membarrier.cc
@@ -75,7 +75,7 @@
#else // __NR_membarrier
-int membarrier(MembarrierCommand command ATTRIBUTE_UNUSED) {
+int membarrier([[maybe_unused]] MembarrierCommand command) {
// In principle this could be supported on linux, but Android's prebuilt glibc does not include
// the system call number defintions (b/111199492).
errno = ENOSYS;
diff --git a/libartbase/base/memfd.cc b/libartbase/base/memfd.cc
index 8512a3a..e96391a 100644
--- a/libartbase/base/memfd.cc
+++ b/libartbase/base/memfd.cc
@@ -68,7 +68,7 @@
#else // __NR_memfd_create
-int memfd_create(const char* name ATTRIBUTE_UNUSED, unsigned int flags ATTRIBUTE_UNUSED) {
+int memfd_create([[maybe_unused]] const char* name, [[maybe_unused]] unsigned int flags) {
errno = ENOSYS;
return -1;
}
diff --git a/libartbase/base/metrics/metrics_test.cc b/libartbase/base/metrics/metrics_test.cc
index 2d69c95..61dfddd 100644
--- a/libartbase/base/metrics/metrics_test.cc
+++ b/libartbase/base/metrics/metrics_test.cc
@@ -272,13 +272,13 @@
class NonZeroBackend : public TestBackendBase {
public:
- void ReportCounter(DatumId counter_type [[maybe_unused]], uint64_t value) override {
+ void ReportCounter([[maybe_unused]] DatumId counter_type, uint64_t value) override {
EXPECT_NE(value, 0u);
}
- void ReportHistogram(DatumId histogram_type [[maybe_unused]],
- int64_t minimum_value [[maybe_unused]],
- int64_t maximum_value [[maybe_unused]],
+ void ReportHistogram([[maybe_unused]] DatumId histogram_type,
+ [[maybe_unused]] int64_t minimum_value,
+ [[maybe_unused]] int64_t maximum_value,
const std::vector<uint32_t>& buckets) override {
bool nonzero = false;
for (const auto value : buckets) {
@@ -296,13 +296,13 @@
class ZeroBackend : public TestBackendBase {
public:
- void ReportCounter(DatumId counter_type [[maybe_unused]], uint64_t value) override {
+ void ReportCounter([[maybe_unused]] DatumId counter_type, uint64_t value) override {
EXPECT_EQ(value, 0u);
}
- void ReportHistogram(DatumId histogram_type [[maybe_unused]],
- int64_t minimum_value [[maybe_unused]],
- int64_t maximum_value [[maybe_unused]],
+ void ReportHistogram([[maybe_unused]] DatumId histogram_type,
+ [[maybe_unused]] int64_t minimum_value,
+ [[maybe_unused]] int64_t maximum_value,
const std::vector<uint32_t>& buckets) override {
for (const auto value : buckets) {
EXPECT_EQ(value, 0u);
@@ -323,13 +323,13 @@
class FirstBackend : public TestBackendBase {
public:
- void ReportCounter(DatumId counter_type [[maybe_unused]], uint64_t value) override {
+ void ReportCounter([[maybe_unused]] DatumId counter_type, uint64_t value) override {
EXPECT_NE(value, 0u);
}
- void ReportHistogram(DatumId histogram_type [[maybe_unused]],
- int64_t minimum_value [[maybe_unused]],
- int64_t maximum_value [[maybe_unused]],
+ void ReportHistogram([[maybe_unused]] DatumId histogram_type,
+ [[maybe_unused]] int64_t minimum_value,
+ [[maybe_unused]] int64_t maximum_value,
const std::vector<uint32_t>& buckets) override {
EXPECT_NE(buckets[0], 0u) << "Bucket 0 should have a non-zero value";
for (size_t i = 1; i < buckets.size(); i++) {
@@ -368,9 +368,9 @@
}
// All histograms are event metrics.
- void ReportHistogram(DatumId histogram_type [[maybe_unused]],
- int64_t minimum_value [[maybe_unused]],
- int64_t maximum_value [[maybe_unused]],
+ void ReportHistogram([[maybe_unused]] DatumId histogram_type,
+ [[maybe_unused]] int64_t minimum_value,
+ [[maybe_unused]] int64_t maximum_value,
const std::vector<uint32_t>& buckets) override {
EXPECT_NE(buckets[0], 0u) << "Bucket 0 should have a non-zero value";
for (size_t i = 1; i < buckets.size(); i++) {
diff --git a/libartbase/base/scoped_arena_allocator.h b/libartbase/base/scoped_arena_allocator.h
index 6de0192..165fb8c 100644
--- a/libartbase/base/scoped_arena_allocator.h
+++ b/libartbase/base/scoped_arena_allocator.h
@@ -171,7 +171,7 @@
size_t ApproximatePeakBytes();
// Allow a delete-expression to destroy but not deallocate allocators created by Create().
- static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}
+ static void operator delete([[maybe_unused]] void* ptr) {}
private:
ArenaStack* arena_stack_;
diff --git a/libartbase/base/scoped_arena_containers.h b/libartbase/base/scoped_arena_containers.h
index 5f0cfe6..d0ff7f5 100644
--- a/libartbase/base/scoped_arena_containers.h
+++ b/libartbase/base/scoped_arena_containers.h
@@ -185,7 +185,7 @@
const_pointer address(const_reference x) const { return &x; }
pointer allocate(size_type n,
- ScopedArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
+ [[maybe_unused]] ScopedArenaAllocatorAdapter<void>::pointer hint = nullptr) {
DCHECK_LE(n, max_size());
DebugStackIndirectTopRef::CheckTop();
return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T),
@@ -273,7 +273,7 @@
template <typename T>
class ArenaDelete<T[]> {
public:
- void operator()(T* ptr ATTRIBUTE_UNUSED) const {
+ void operator()([[maybe_unused]] T* ptr) const {
static_assert(std::is_trivially_destructible_v<T>,
"ArenaUniquePtr does not support non-trivially-destructible arrays.");
// TODO: Implement debug checks, and MEMORY_TOOL support.
diff --git a/libartbase/base/transform_array_ref_test.cc b/libartbase/base/transform_array_ref_test.cc
index 4ac6978..e1d8d52 100644
--- a/libartbase/base/transform_array_ref_test.cc
+++ b/libartbase/base/transform_array_ref_test.cc
@@ -30,7 +30,7 @@
int value;
};
-ATTRIBUTE_UNUSED bool operator==(const ValueHolder& lhs, const ValueHolder& rhs) {
+[[maybe_unused]] bool operator==(const ValueHolder& lhs, const ValueHolder& rhs) {
return lhs.value == rhs.value;
}
diff --git a/libartbase/base/unix_file/random_access_file_test.h b/libartbase/base/unix_file/random_access_file_test.h
index 178f89d..0592256 100644
--- a/libartbase/base/unix_file/random_access_file_test.h
+++ b/libartbase/base/unix_file/random_access_file_test.h
@@ -171,8 +171,7 @@
CleanUp(file.get());
}
- virtual void CleanUp(RandomAccessFile* file ATTRIBUTE_UNUSED) {
- }
+ virtual void CleanUp([[maybe_unused]] RandomAccessFile* file) {}
protected:
std::string android_data_;
diff --git a/libartbase/base/utils.h b/libartbase/base/utils.h
index f311f09..5e04cb0 100644
--- a/libartbase/base/utils.h
+++ b/libartbase/base/utils.h
@@ -75,16 +75,13 @@
class VoidFunctor {
public:
template <typename A>
- inline void operator() (A a ATTRIBUTE_UNUSED) const {
- }
+ inline void operator()([[maybe_unused]] A a) const {}
template <typename A, typename B>
- inline void operator() (A a ATTRIBUTE_UNUSED, B b ATTRIBUTE_UNUSED) const {
- }
+ inline void operator()([[maybe_unused]] A a, [[maybe_unused]] B b) const {}
template <typename A, typename B, typename C>
- inline void operator() (A a ATTRIBUTE_UNUSED, B b ATTRIBUTE_UNUSED, C c ATTRIBUTE_UNUSED) const {
- }
+ inline void operator()([[maybe_unused]] A a, [[maybe_unused]] B b, [[maybe_unused]] C c) const {}
};
inline bool TestBitmap(size_t idx, const uint8_t* bitmap) {
diff --git a/libartpalette/system/palette_fake.cc b/libartpalette/system/palette_fake.cc
index 743a4db..accdc4c 100644
--- a/libartpalette/system/palette_fake.cc
+++ b/libartpalette/system/palette_fake.cc
@@ -14,15 +14,13 @@
* limitations under the License.
*/
-#include "palette/palette.h"
+#include <android-base/logging.h>
+#include <stdbool.h>
#include <map>
#include <mutex>
-#include <stdbool.h>
-#include <android-base/logging.h>
-#include <android-base/macros.h> // For ATTRIBUTE_UNUSED
-
+#include "palette/palette.h"
#include "palette_system.h"
// Methods in version 1 API, corresponding to SDK level 31.
@@ -61,28 +59,25 @@
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteTraceBegin(const char* name ATTRIBUTE_UNUSED) {
- return PALETTE_STATUS_OK;
-}
+palette_status_t PaletteTraceBegin([[maybe_unused]] const char* name) { return PALETTE_STATUS_OK; }
palette_status_t PaletteTraceEnd() {
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteTraceIntegerValue(const char* name ATTRIBUTE_UNUSED,
- int32_t value ATTRIBUTE_UNUSED) {
+palette_status_t PaletteTraceIntegerValue([[maybe_unused]] const char* name,
+ [[maybe_unused]] int32_t value) {
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteAshmemCreateRegion(const char* name ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED,
+palette_status_t PaletteAshmemCreateRegion([[maybe_unused]] const char* name,
+ [[maybe_unused]] size_t size,
int* fd) {
*fd = -1;
return PALETTE_STATUS_NOT_SUPPORTED;
}
-palette_status_t PaletteAshmemSetProtRegion(int fd ATTRIBUTE_UNUSED,
- int prot ATTRIBUTE_UNUSED) {
+palette_status_t PaletteAshmemSetProtRegion([[maybe_unused]] int fd, [[maybe_unused]] int prot) {
return PALETTE_STATUS_NOT_SUPPORTED;
}
@@ -96,25 +91,25 @@
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteNotifyStartDex2oatCompilation(int source_fd ATTRIBUTE_UNUSED,
- int art_fd ATTRIBUTE_UNUSED,
- int oat_fd ATTRIBUTE_UNUSED,
- int vdex_fd ATTRIBUTE_UNUSED) {
+palette_status_t PaletteNotifyStartDex2oatCompilation([[maybe_unused]] int source_fd,
+ [[maybe_unused]] int art_fd,
+ [[maybe_unused]] int oat_fd,
+ [[maybe_unused]] int vdex_fd) {
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteNotifyEndDex2oatCompilation(int source_fd ATTRIBUTE_UNUSED,
- int art_fd ATTRIBUTE_UNUSED,
- int oat_fd ATTRIBUTE_UNUSED,
- int vdex_fd ATTRIBUTE_UNUSED) {
+palette_status_t PaletteNotifyEndDex2oatCompilation([[maybe_unused]] int source_fd,
+ [[maybe_unused]] int art_fd,
+ [[maybe_unused]] int oat_fd,
+ [[maybe_unused]] int vdex_fd) {
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteNotifyDexFileLoaded(const char* path ATTRIBUTE_UNUSED) {
+palette_status_t PaletteNotifyDexFileLoaded([[maybe_unused]] const char* path) {
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteNotifyOatFileLoaded(const char* path ATTRIBUTE_UNUSED) {
+palette_status_t PaletteNotifyOatFileLoaded([[maybe_unused]] const char* path) {
return PALETTE_STATUS_OK;
}
@@ -123,33 +118,33 @@
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteNotifyBeginJniInvocation(JNIEnv* env ATTRIBUTE_UNUSED) {
+palette_status_t PaletteNotifyBeginJniInvocation([[maybe_unused]] JNIEnv* env) {
return PALETTE_STATUS_OK;
}
-palette_status_t PaletteNotifyEndJniInvocation(JNIEnv* env ATTRIBUTE_UNUSED) {
+palette_status_t PaletteNotifyEndJniInvocation([[maybe_unused]] JNIEnv* env) {
return PALETTE_STATUS_OK;
}
// Methods in version 2 API, corresponding to SDK level 33.
-palette_status_t PaletteReportLockContention(JNIEnv* env ATTRIBUTE_UNUSED,
- int32_t wait_ms ATTRIBUTE_UNUSED,
- const char* filename ATTRIBUTE_UNUSED,
- int32_t line_number ATTRIBUTE_UNUSED,
- const char* method_name ATTRIBUTE_UNUSED,
- const char* owner_filename ATTRIBUTE_UNUSED,
- int32_t owner_line_number ATTRIBUTE_UNUSED,
- const char* owner_method_name ATTRIBUTE_UNUSED,
- const char* proc_name ATTRIBUTE_UNUSED,
- const char* thread_name ATTRIBUTE_UNUSED) {
+palette_status_t PaletteReportLockContention([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] int32_t wait_ms,
+ [[maybe_unused]] const char* filename,
+ [[maybe_unused]] int32_t line_number,
+ [[maybe_unused]] const char* method_name,
+ [[maybe_unused]] const char* owner_filename,
+ [[maybe_unused]] int32_t owner_line_number,
+ [[maybe_unused]] const char* owner_method_name,
+ [[maybe_unused]] const char* proc_name,
+ [[maybe_unused]] const char* thread_name) {
return PALETTE_STATUS_OK;
}
// Methods in version 3 API, corresponding to SDK level 34.
-palette_status_t PaletteSetTaskProfiles(int32_t tid ATTRIBUTE_UNUSED,
- const char* const profiles[] ATTRIBUTE_UNUSED,
- size_t profiles_len ATTRIBUTE_UNUSED) {
+palette_status_t PaletteSetTaskProfiles([[maybe_unused]] int32_t tid,
+ [[maybe_unused]] const char* const profiles[],
+ [[maybe_unused]] size_t profiles_len) {
return PALETTE_STATUS_OK;
}
diff --git a/libdexfile/dex/code_item_accessors-inl.h b/libdexfile/dex/code_item_accessors-inl.h
index 1e33002..b740460 100644
--- a/libdexfile/dex/code_item_accessors-inl.h
+++ b/libdexfile/dex/code_item_accessors-inl.h
@@ -178,8 +178,7 @@
template <>
inline void CodeItemDebugInfoAccessor::Init<StandardDexFile::CodeItem>(
- const StandardDexFile::CodeItem& code_item,
- uint32_t dex_method_index ATTRIBUTE_UNUSED) {
+ const StandardDexFile::CodeItem& code_item, [[maybe_unused]] uint32_t dex_method_index) {
debug_info_offset_ = code_item.debug_info_off_;
CodeItemDataAccessor::Init(code_item);
}
diff --git a/libdexfile/dex/dex_file_verifier.cc b/libdexfile/dex/dex_file_verifier.cc
index 42eff2c..e16f72b 100644
--- a/libdexfile/dex/dex_file_verifier.cc
+++ b/libdexfile/dex/dex_file_verifier.cc
@@ -50,9 +50,7 @@
return (high == 0) || ((high == 0xffffU) && (low == 0xffffU));
}
-constexpr bool IsValidTypeId(uint16_t low ATTRIBUTE_UNUSED, uint16_t high) {
- return (high == 0);
-}
+constexpr bool IsValidTypeId([[maybe_unused]] uint16_t low, uint16_t high) { return (high == 0); }
constexpr uint32_t MapTypeToBitMask(DexFile::MapItemType map_item_type) {
switch (map_item_type) {
diff --git a/libdexfile/dex/dex_file_verifier_test.cc b/libdexfile/dex/dex_file_verifier_test.cc
index d31635e..a2f2e93 100644
--- a/libdexfile/dex/dex_file_verifier_test.cc
+++ b/libdexfile/dex/dex_file_verifier_test.cc
@@ -1454,7 +1454,7 @@
VerifyModification(
kClassExtendsItselfTestDex,
"class_extends_itself",
- [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ },
+ []([[maybe_unused]] DexFile* dex_file) { /* empty */ },
"Class with same type idx as its superclass: '0'");
}
@@ -1479,7 +1479,7 @@
VerifyModification(
kClassesExtendOneAnotherTestDex,
"classes_extend_one_another",
- [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ },
+ []([[maybe_unused]] DexFile* dex_file) { /* empty */ },
"Invalid class definition ordering: class with type idx: '1' defined before"
" superclass with type idx: '0'");
}
@@ -1511,7 +1511,7 @@
VerifyModification(
kCircularClassInheritanceTestDex,
"circular_class_inheritance",
- [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ },
+ []([[maybe_unused]] DexFile* dex_file) { /* empty */ },
"Invalid class definition ordering: class with type idx: '1' defined before"
" superclass with type idx: '0'");
}
@@ -1534,7 +1534,7 @@
VerifyModification(
kInterfaceImplementsItselfTestDex,
"interface_implements_itself",
- [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ },
+ []([[maybe_unused]] DexFile* dex_file) { /* empty */ },
"Class with same type idx as implemented interface: '0'");
}
@@ -1562,7 +1562,7 @@
VerifyModification(
kInterfacesImplementOneAnotherTestDex,
"interfaces_implement_one_another",
- [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ },
+ []([[maybe_unused]] DexFile* dex_file) { /* empty */ },
"Invalid class definition ordering: class with type idx: '1' defined before"
" implemented interface with type idx: '0'");
}
@@ -1598,7 +1598,7 @@
VerifyModification(
kCircularInterfaceImplementationTestDex,
"circular_interface_implementation",
- [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ },
+ []([[maybe_unused]] DexFile* dex_file) { /* empty */ },
"Invalid class definition ordering: class with type idx: '2' defined before"
" implemented interface with type idx: '0'");
}
diff --git a/libdexfile/dex/utf.cc b/libdexfile/dex/utf.cc
index 9692a26..bcda8ca 100644
--- a/libdexfile/dex/utf.cc
+++ b/libdexfile/dex/utf.cc
@@ -209,7 +209,7 @@
size_t CountModifiedUtf8BytesInUtf16(const uint16_t* chars, size_t char_count) {
// FIXME: We should not emit 4-byte sequences. Bug: 192935764
size_t result = 0;
- auto append = [&](char c ATTRIBUTE_UNUSED) { ++result; };
+ auto append = [&]([[maybe_unused]] char c) { ++result; };
ConvertUtf16ToUtf8</*kUseShortZero=*/ false,
/*kUse4ByteSequence=*/ true,
/*kReplaceBadSurrogates=*/ false>(chars, char_count, append);
diff --git a/libelffile/elf/elf_utils.h b/libelffile/elf/elf_utils.h
index 46b25b0..e101920 100644
--- a/libelffile/elf/elf_utils.h
+++ b/libelffile/elf/elf_utils.h
@@ -96,7 +96,7 @@
}
static inline bool IsDynamicSectionPointer(Elf32_Word d_tag,
- Elf32_Word e_machine ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] Elf32_Word e_machine) {
// TODO: Remove the `e_machine` parameter from API (not needed after Mips target was removed).
switch (d_tag) {
// case 1: well known d_tag values that imply Elf32_Dyn.d_un contains an address in d_ptr
diff --git a/libprofile/profile/profile_compilation_info.cc b/libprofile/profile/profile_compilation_info.cc
index 30b0a1c..af007d1 100644
--- a/libprofile/profile/profile_compilation_info.cc
+++ b/libprofile/profile/profile_compilation_info.cc
@@ -2507,8 +2507,7 @@
}
bool ProfileCompilationInfo::ProfileFilterFnAcceptAll(
- const std::string& dex_location ATTRIBUTE_UNUSED,
- uint32_t checksum ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const std::string& dex_location, [[maybe_unused]] uint32_t checksum) {
return true;
}
diff --git a/odrefresh/odr_fs_utils.cc b/odrefresh/odr_fs_utils.cc
index 3ed8021..2a77e52 100644
--- a/odrefresh/odr_fs_utils.cc
+++ b/odrefresh/odr_fs_utils.cc
@@ -43,7 +43,7 @@
// Callback for use with nftw(3) to assist with clearing files and sub-directories.
// This method removes files and directories below the top-level directory passed to nftw().
static int NftwCleanUpCallback(const char* fpath,
- const struct stat* sb ATTRIBUTE_UNUSED,
+ [[maybe_unused]] const struct stat* sb,
int typeflag,
struct FTW* ftwbuf) {
switch (typeflag) {
diff --git a/odrefresh/odrefresh_broken.cc b/odrefresh/odrefresh_broken.cc
index 7b7d095..6657a10 100644
--- a/odrefresh/odrefresh_broken.cc
+++ b/odrefresh/odrefresh_broken.cc
@@ -17,8 +17,7 @@
#include <android-base/macros.h>
#include <odrefresh/odrefresh.h>
-
-int main(int argc ATTRIBUTE_UNUSED, char** argv ATTRIBUTE_UNUSED) {
+int main([[maybe_unused]] int argc, [[maybe_unused]] char** argv) {
// Return a value that will make odsign just cleanup all potential existing /data
// artifacts.
return art::odrefresh::ExitCode::kCleanupFailed;
diff --git a/openjdkjvm/OpenjdkJvm.cc b/openjdkjvm/OpenjdkJvm.cc
index da6fc85..c0f874f 100644
--- a/openjdkjvm/OpenjdkJvm.cc
+++ b/openjdkjvm/OpenjdkJvm.cc
@@ -204,8 +204,7 @@
return dlsym(handle, name);
}
-JNIEXPORT jlong JVM_CurrentTimeMillis(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass clazz ATTRIBUTE_UNUSED) {
+JNIEXPORT jlong JVM_CurrentTimeMillis([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass clazz) {
struct timeval tv;
gettimeofday(&tv, (struct timezone *) nullptr);
jlong when = tv.tv_sec * 1000LL + tv.tv_usec / 1000;
@@ -216,9 +215,9 @@
* See the spec of this function in jdk.internal.misc.VM.
* @return -1 if the system time isn't within +/- 2^32 seconds from offset_secs
*/
-JNIEXPORT jlong JVM_GetNanoTimeAdjustment(JNIEnv *ATTRIBUTE_UNUSED,
- jclass ATTRIBUTE_UNUSED,
- jlong offset_secs) {
+JNIEXPORT jlong JVM_GetNanoTimeAdjustment([[maybe_unused]] JNIEnv*,
+ [[maybe_unused]] jclass,
+ jlong offset_secs) {
struct timeval tv;
// Note that we don't want the elapsed time here, but the system clock.
// gettimeofday() doesn't provide nanosecond-level precision.
@@ -388,19 +387,21 @@
}
}
-JNIEXPORT void JVM_Yield(JNIEnv* env ATTRIBUTE_UNUSED, jclass threadClass ATTRIBUTE_UNUSED) {
+JNIEXPORT void JVM_Yield([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass threadClass) {
sched_yield();
}
-JNIEXPORT void JVM_Sleep(JNIEnv* env, jclass threadClass ATTRIBUTE_UNUSED,
- jobject java_lock, jlong millis) {
+JNIEXPORT void JVM_Sleep(JNIEnv* env,
+ [[maybe_unused]] jclass threadClass,
+ jobject java_lock,
+ jlong millis) {
art::ScopedFastNativeObjectAccess soa(env);
art::ObjPtr<art::mirror::Object> lock = soa.Decode<art::mirror::Object>(java_lock);
art::Monitor::Wait(
art::Thread::Current(), lock.Ptr(), millis, 0, true, art::ThreadState::kSleeping);
}
-JNIEXPORT jobject JVM_CurrentThread(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED) {
+JNIEXPORT jobject JVM_CurrentThread(JNIEnv* env, [[maybe_unused]] jclass unused) {
art::ScopedFastNativeObjectAccess soa(env);
return soa.AddLocalReference<jobject>(soa.Self()->GetPeer());
}
@@ -425,7 +426,7 @@
}
}
-JNIEXPORT jboolean JVM_HoldsLock(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED, jobject jobj) {
+JNIEXPORT jboolean JVM_HoldsLock(JNIEnv* env, [[maybe_unused]] jclass unused, jobject jobj) {
art::ScopedObjectAccess soa(env);
art::ObjPtr<art::mirror::Object> object = soa.Decode<art::mirror::Object>(jobj);
if (object == nullptr) {
@@ -436,20 +437,21 @@
}
JNIEXPORT __attribute__((noreturn)) void JVM_SetNativeThreadName(
- JNIEnv* env ATTRIBUTE_UNUSED,
- jobject jthread ATTRIBUTE_UNUSED,
- jstring java_name ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jobject jthread,
+ [[maybe_unused]] jstring java_name) {
UNIMPLEMENTED(FATAL) << "JVM_SetNativeThreadName is not implemented";
UNREACHABLE();
}
-JNIEXPORT __attribute__((noreturn)) jint JVM_IHashCode(JNIEnv* env ATTRIBUTE_UNUSED,
- jobject javaObject ATTRIBUTE_UNUSED) {
+JNIEXPORT __attribute__((noreturn)) jint JVM_IHashCode([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jobject javaObject) {
UNIMPLEMENTED(FATAL) << "JVM_IHashCode is not implemented";
UNREACHABLE();
}
-JNIEXPORT __attribute__((noreturn)) jlong JVM_NanoTime(JNIEnv* env ATTRIBUTE_UNUSED, jclass unused ATTRIBUTE_UNUSED) {
+JNIEXPORT __attribute__((noreturn)) jlong JVM_NanoTime([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass unused) {
UNIMPLEMENTED(FATAL) << "JVM_NanoTime is not implemented";
UNREACHABLE();
}
@@ -461,17 +463,18 @@
UNREACHABLE();
}
-JNIEXPORT __attribute__((noreturn)) jint JVM_FindSignal(const char* name ATTRIBUTE_UNUSED) {
+JNIEXPORT __attribute__((noreturn)) jint JVM_FindSignal([[maybe_unused]] const char* name) {
LOG(FATAL) << "JVM_FindSignal is not implemented";
UNREACHABLE();
}
-JNIEXPORT __attribute__((noreturn)) void* JVM_RegisterSignal(jint signum ATTRIBUTE_UNUSED, void* handler ATTRIBUTE_UNUSED) {
+JNIEXPORT __attribute__((noreturn)) void* JVM_RegisterSignal([[maybe_unused]] jint signum,
+ [[maybe_unused]] void* handler) {
LOG(FATAL) << "JVM_RegisterSignal is not implemented";
UNREACHABLE();
}
-JNIEXPORT __attribute__((noreturn)) jboolean JVM_RaiseSignal(jint signum ATTRIBUTE_UNUSED) {
+JNIEXPORT __attribute__((noreturn)) jboolean JVM_RaiseSignal([[maybe_unused]] jint signum) {
LOG(FATAL) << "JVM_RaiseSignal is not implemented";
UNREACHABLE();
}
diff --git a/openjdkjvmti/OpenjdkJvmTi.cc b/openjdkjvmti/OpenjdkJvmTi.cc
index 276b3a8..1e76368 100644
--- a/openjdkjvmti/OpenjdkJvmTi.cc
+++ b/openjdkjvmti/OpenjdkJvmTi.cc
@@ -472,9 +472,9 @@
static jvmtiError IterateOverObjectsReachableFromObject(
jvmtiEnv* env,
- jobject object ATTRIBUTE_UNUSED,
- jvmtiObjectReferenceCallback object_reference_callback ATTRIBUTE_UNUSED,
- const void* user_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jobject object,
+ [[maybe_unused]] jvmtiObjectReferenceCallback object_reference_callback,
+ [[maybe_unused]] const void* user_data) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_tag_objects);
return ERR(NOT_IMPLEMENTED);
@@ -482,19 +482,19 @@
static jvmtiError IterateOverReachableObjects(
jvmtiEnv* env,
- jvmtiHeapRootCallback heap_root_callback ATTRIBUTE_UNUSED,
- jvmtiStackReferenceCallback stack_ref_callback ATTRIBUTE_UNUSED,
- jvmtiObjectReferenceCallback object_ref_callback ATTRIBUTE_UNUSED,
- const void* user_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiHeapRootCallback heap_root_callback,
+ [[maybe_unused]] jvmtiStackReferenceCallback stack_ref_callback,
+ [[maybe_unused]] jvmtiObjectReferenceCallback object_ref_callback,
+ [[maybe_unused]] const void* user_data) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_tag_objects);
return ERR(NOT_IMPLEMENTED);
}
static jvmtiError IterateOverHeap(jvmtiEnv* env,
- jvmtiHeapObjectFilter object_filter ATTRIBUTE_UNUSED,
- jvmtiHeapObjectCallback heap_object_callback ATTRIBUTE_UNUSED,
- const void* user_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiHeapObjectFilter object_filter,
+ [[maybe_unused]] jvmtiHeapObjectCallback heap_object_callback,
+ [[maybe_unused]] const void* user_data) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_tag_objects);
return ERR(NOT_IMPLEMENTED);
@@ -730,10 +730,10 @@
}
static jvmtiError GetConstantPool(jvmtiEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
- jint* constant_pool_count_ptr ATTRIBUTE_UNUSED,
- jint* constant_pool_byte_count_ptr ATTRIBUTE_UNUSED,
- unsigned char** constant_pool_bytes_ptr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass klass,
+ [[maybe_unused]] jint* constant_pool_count_ptr,
+ [[maybe_unused]] jint* constant_pool_byte_count_ptr,
+ [[maybe_unused]] unsigned char** constant_pool_bytes_ptr) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_get_constant_pool);
return ERR(NOT_IMPLEMENTED);
@@ -926,15 +926,15 @@
return MethodUtil::IsMethodObsolete(env, method, is_obsolete_ptr);
}
- static jvmtiError SetNativeMethodPrefix(jvmtiEnv* env, const char* prefix ATTRIBUTE_UNUSED) {
+ static jvmtiError SetNativeMethodPrefix(jvmtiEnv* env, [[maybe_unused]] const char* prefix) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_set_native_method_prefix);
return ERR(NOT_IMPLEMENTED);
}
static jvmtiError SetNativeMethodPrefixes(jvmtiEnv* env,
- jint prefix_count ATTRIBUTE_UNUSED,
- char** prefixes ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jint prefix_count,
+ [[maybe_unused]] char** prefixes) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_set_native_method_prefix);
return ERR(NOT_IMPLEMENTED);
@@ -1032,8 +1032,7 @@
mode);
}
- static jvmtiError GenerateEvents(jvmtiEnv* env,
- jvmtiEvent event_type ATTRIBUTE_UNUSED) {
+ static jvmtiError GenerateEvents(jvmtiEnv* env, [[maybe_unused]] jvmtiEvent event_type) {
ENSURE_VALID_ENV(env);
return OK;
}
@@ -1195,28 +1194,28 @@
}
static jvmtiError GetCurrentThreadCpuTimerInfo(jvmtiEnv* env,
- jvmtiTimerInfo* info_ptr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiTimerInfo* info_ptr) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_get_current_thread_cpu_time);
return ERR(NOT_IMPLEMENTED);
}
- static jvmtiError GetCurrentThreadCpuTime(jvmtiEnv* env, jlong* nanos_ptr ATTRIBUTE_UNUSED) {
+ static jvmtiError GetCurrentThreadCpuTime(jvmtiEnv* env, [[maybe_unused]] jlong* nanos_ptr) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_get_current_thread_cpu_time);
return ERR(NOT_IMPLEMENTED);
}
static jvmtiError GetThreadCpuTimerInfo(jvmtiEnv* env,
- jvmtiTimerInfo* info_ptr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiTimerInfo* info_ptr) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_get_thread_cpu_time);
return ERR(NOT_IMPLEMENTED);
}
static jvmtiError GetThreadCpuTime(jvmtiEnv* env,
- jthread thread ATTRIBUTE_UNUSED,
- jlong* nanos_ptr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jthread thread,
+ [[maybe_unused]] jlong* nanos_ptr) {
ENSURE_VALID_ENV(env);
ENSURE_HAS_CAP(env, can_get_thread_cpu_time);
return ERR(NOT_IMPLEMENTED);
diff --git a/openjdkjvmti/alloc_manager.cc b/openjdkjvmti/alloc_manager.cc
index 5910073..b20e098 100644
--- a/openjdkjvmti/alloc_manager.cc
+++ b/openjdkjvmti/alloc_manager.cc
@@ -198,9 +198,8 @@
// Force every thread to either be suspended or pass through a barrier.
art::ScopedThreadSuspension sts(self, art::ThreadState::kSuspended);
art::Barrier barrier(0);
- art::FunctionClosure fc([&](art::Thread* thr ATTRIBUTE_UNUSED) {
- barrier.Pass(art::Thread::Current());
- });
+ art::FunctionClosure fc(
+ [&]([[maybe_unused]] art::Thread* thr) { barrier.Pass(art::Thread::Current()); });
size_t requested = art::Runtime::Current()->GetThreadList()->RunCheckpoint(&fc);
barrier.Increment(self, requested);
}
diff --git a/openjdkjvmti/events-inl.h b/openjdkjvmti/events-inl.h
index 92cfbc9..1e309b4 100644
--- a/openjdkjvmti/events-inl.h
+++ b/openjdkjvmti/events-inl.h
@@ -360,8 +360,8 @@
inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kBreakpoint>(
ArtJvmTiEnv* env,
art::Thread* thread,
-    JNIEnv* jnienv ATTRIBUTE_UNUSED,
-    jthread jni_thread ATTRIBUTE_UNUSED,
+    [[maybe_unused]] JNIEnv* jnienv,
+    [[maybe_unused]] jthread jni_thread,
jmethodID jmethod,
jlocation location) const {
art::ReaderMutexLock lk(art::Thread::Current(), env->event_info_mutex_);
@@ -374,10 +375,10 @@
inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kFramePop>(
ArtJvmTiEnv* env,
art::Thread* thread,
- JNIEnv* jnienv ATTRIBUTE_UNUSED,
- jthread jni_thread ATTRIBUTE_UNUSED,
- jmethodID jmethod ATTRIBUTE_UNUSED,
- jboolean is_exception ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jnienv,
+ [[maybe_unused]] jthread jni_thread,
+ [[maybe_unused]] jmethodID jmethod,
+ [[maybe_unused]] jboolean is_exception,
const art::ShadowFrame* frame) const {
// Search for the frame. Do this before checking if we need to send the event so that we don't
// have to deal with use-after-free or the frames being reallocated later.
@@ -395,15 +396,15 @@
inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kFieldModification>(
ArtJvmTiEnv* env,
art::Thread* thread,
- JNIEnv* jnienv ATTRIBUTE_UNUSED,
- jthread jni_thread ATTRIBUTE_UNUSED,
- jmethodID method ATTRIBUTE_UNUSED,
- jlocation location ATTRIBUTE_UNUSED,
- jclass field_klass ATTRIBUTE_UNUSED,
- jobject object ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jnienv,
+ [[maybe_unused]] jthread jni_thread,
+ [[maybe_unused]] jmethodID method,
+ [[maybe_unused]] jlocation location,
+ [[maybe_unused]] jclass field_klass,
+ [[maybe_unused]] jobject object,
jfieldID field,
- char type_char ATTRIBUTE_UNUSED,
- jvalue val ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] char type_char,
+ [[maybe_unused]] jvalue val) const {
art::ReaderMutexLock lk(art::Thread::Current(), env->event_info_mutex_);
return ShouldDispatchOnThread<ArtJvmtiEvent::kFieldModification>(env, thread) &&
env->modify_watched_fields.find(
@@ -414,12 +415,12 @@
inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kFieldAccess>(
ArtJvmTiEnv* env,
art::Thread* thread,
- JNIEnv* jnienv ATTRIBUTE_UNUSED,
- jthread jni_thread ATTRIBUTE_UNUSED,
- jmethodID method ATTRIBUTE_UNUSED,
- jlocation location ATTRIBUTE_UNUSED,
- jclass field_klass ATTRIBUTE_UNUSED,
- jobject object ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jnienv,
+ [[maybe_unused]] jthread jni_thread,
+ [[maybe_unused]] jmethodID method,
+ [[maybe_unused]] jlocation location,
+ [[maybe_unused]] jclass field_klass,
+ [[maybe_unused]] jobject object,
jfieldID field) const {
art::ReaderMutexLock lk(art::Thread::Current(), env->event_info_mutex_);
return ShouldDispatchOnThread<ArtJvmtiEvent::kFieldAccess>(env, thread) &&
@@ -439,7 +440,7 @@
jthread jni_thread,
jmethodID jmethod,
jboolean is_exception,
- const art::ShadowFrame* frame ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const art::ShadowFrame* frame) {
ExecuteCallback<ArtJvmtiEvent::kFramePop>(event, jnienv, jni_thread, jmethod, is_exception);
}
@@ -628,10 +629,10 @@
return dispatch;
}
-template <ArtJvmtiEvent kEvent, typename ...Args>
+template <ArtJvmtiEvent kEvent, typename... Args>
inline bool EventHandler::ShouldDispatch(ArtJvmTiEnv* env,
art::Thread* thread,
- Args... args ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] Args... args) const {
static_assert(std::is_same<typename impl::EventFnType<kEvent>::type,
void(*)(jvmtiEnv*, Args...)>::value,
"Unexpected different type of shouldDispatch");
diff --git a/openjdkjvmti/events.cc b/openjdkjvmti/events.cc
index 64da6ed..31107d0 100644
--- a/openjdkjvmti/events.cc
+++ b/openjdkjvmti/events.cc
@@ -737,9 +737,7 @@
// Call-back for when a method is popped due to an exception throw. A method will either cause a
// MethodExited call-back or a MethodUnwind call-back when its activation is removed.
- void MethodUnwind(art::Thread* self,
- art::ArtMethod* method,
- uint32_t dex_pc ATTRIBUTE_UNUSED)
+ void MethodUnwind(art::Thread* self, art::ArtMethod* method, [[maybe_unused]] uint32_t dex_pc)
REQUIRES_SHARED(art::Locks::mutator_lock_) override {
if (!method->IsRuntimeMethod() &&
event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
@@ -767,10 +765,9 @@
// Call-back for when the dex pc moves in a method.
void DexPcMoved(art::Thread* self,
- art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
+ [[maybe_unused]] art::Handle<art::mirror::Object> this_object,
art::ArtMethod* method,
- uint32_t new_dex_pc)
- REQUIRES_SHARED(art::Locks::mutator_lock_) override {
+ uint32_t new_dex_pc) REQUIRES_SHARED(art::Locks::mutator_lock_) override {
DCHECK(!method->IsRuntimeMethod());
// Default methods might be copied to multiple classes. We need to get the canonical version of
// this method so that we can check for breakpoints correctly.
@@ -1034,10 +1031,10 @@
}
// Call-back for when we execute a branch.
- void Branch(art::Thread* self ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- int32_t dex_pc_offset ATTRIBUTE_UNUSED)
+ void Branch([[maybe_unused]] art::Thread* self,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] int32_t dex_pc_offset)
REQUIRES_SHARED(art::Locks::mutator_lock_) override {
return;
}
diff --git a/openjdkjvmti/jvmti_allocator.h b/openjdkjvmti/jvmti_allocator.h
index 4adf769..618a661 100644
--- a/openjdkjvmti/jvmti_allocator.h
+++ b/openjdkjvmti/jvmti_allocator.h
@@ -110,7 +110,7 @@
pointer address(reference x) const { return &x; }
const_pointer address(const_reference x) const { return &x; }
- pointer allocate(size_type n, JvmtiAllocator<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
+ pointer allocate(size_type n, [[maybe_unused]] JvmtiAllocator<void>::pointer hint = nullptr) {
DCHECK_LE(n, max_size());
if (env_ == nullptr) {
T* result = reinterpret_cast<T*>(AllocUtil::AllocateImpl(n * sizeof(T)));
@@ -123,7 +123,7 @@
return reinterpret_cast<T*>(result);
}
}
- void deallocate(pointer p, size_type n ATTRIBUTE_UNUSED) {
+ void deallocate(pointer p, [[maybe_unused]] size_type n) {
if (env_ == nullptr) {
AllocUtil::DeallocateImpl(reinterpret_cast<unsigned char*>(p));
} else {
diff --git a/openjdkjvmti/jvmti_weak_table-inl.h b/openjdkjvmti/jvmti_weak_table-inl.h
index c5663e5..7502ad2 100644
--- a/openjdkjvmti/jvmti_weak_table-inl.h
+++ b/openjdkjvmti/jvmti_weak_table-inl.h
@@ -68,10 +68,10 @@
update_since_last_sweep_ = true;
auto WithReadBarrierUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root,
- art::mirror::Object* original_obj ATTRIBUTE_UNUSED)
- REQUIRES_SHARED(art::Locks::mutator_lock_) {
- return original_root.Read<art::kWithReadBarrier>();
- };
+ [[maybe_unused]] art::mirror::Object* original_obj)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ return original_root.Read<art::kWithReadBarrier>();
+ };
UpdateTableWith<decltype(WithReadBarrierUpdater), kIgnoreNull>(WithReadBarrierUpdater);
}
@@ -198,7 +198,7 @@
art::Thread* self = art::Thread::Current();
art::MutexLock mu(self, allow_disallow_lock_);
- auto IsMarkedUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root ATTRIBUTE_UNUSED,
+ auto IsMarkedUpdater = [&]([[maybe_unused]] const art::GcRoot<art::mirror::Object>& original_root,
art::mirror::Object* original_obj) {
return visitor->IsMarked(original_obj);
};
diff --git a/openjdkjvmti/jvmti_weak_table.h b/openjdkjvmti/jvmti_weak_table.h
index 674b2a3..8f8d89b 100644
--- a/openjdkjvmti/jvmti_weak_table.h
+++ b/openjdkjvmti/jvmti_weak_table.h
@@ -128,7 +128,7 @@
return false;
}
// If DoesHandleNullOnSweep returns true, this function will be called.
- virtual void HandleNullSweep(T tag ATTRIBUTE_UNUSED) {}
+ virtual void HandleNullSweep([[maybe_unused]] T tag) {}
private:
ALWAYS_INLINE
diff --git a/openjdkjvmti/ti_allocator.cc b/openjdkjvmti/ti_allocator.cc
index 575558d..1e6d462 100644
--- a/openjdkjvmti/ti_allocator.cc
+++ b/openjdkjvmti/ti_allocator.cc
@@ -47,7 +47,7 @@
std::atomic<jlong> AllocUtil::allocated;
-jvmtiError AllocUtil::GetGlobalJvmtiAllocationState(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError AllocUtil::GetGlobalJvmtiAllocationState([[maybe_unused]] jvmtiEnv* env,
jlong* allocated_ptr) {
if (allocated_ptr == nullptr) {
return ERR(NULL_POINTER);
@@ -56,7 +56,7 @@
return OK;
}
-jvmtiError AllocUtil::Allocate(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError AllocUtil::Allocate([[maybe_unused]] jvmtiEnv* env,
jlong size,
unsigned char** mem_ptr) {
if (size < 0) {
@@ -80,7 +80,7 @@
return ret;
}
-jvmtiError AllocUtil::Deallocate(jvmtiEnv* env ATTRIBUTE_UNUSED, unsigned char* mem) {
+jvmtiError AllocUtil::Deallocate([[maybe_unused]] jvmtiEnv* env, unsigned char* mem) {
DeallocateImpl(mem);
return OK;
}
diff --git a/openjdkjvmti/ti_class.cc b/openjdkjvmti/ti_class.cc
index 3d44516..7ded350 100644
--- a/openjdkjvmti/ti_class.cc
+++ b/openjdkjvmti/ti_class.cc
@@ -162,10 +162,10 @@
art::Handle<art::mirror::Class> klass,
art::Handle<art::mirror::ClassLoader> class_loader,
const art::DexFile& initial_dex_file,
- const art::dex::ClassDef& initial_class_def ATTRIBUTE_UNUSED,
- /*out*/art::DexFile const** final_dex_file,
- /*out*/art::dex::ClassDef const** final_class_def)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ [[maybe_unused]] const art::dex::ClassDef& initial_class_def,
+ /*out*/ art::DexFile const** final_dex_file,
+ /*out*/ art::dex::ClassDef const** final_class_def) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
bool is_enabled =
event_handler->IsEventEnabledAnywhere(ArtJvmtiEvent::kClassFileLoadHookRetransformable) ||
event_handler->IsEventEnabledAnywhere(ArtJvmtiEvent::kClassFileLoadHookNonRetransformable);
@@ -387,8 +387,7 @@
void VisitRoots(art::mirror::Object*** roots,
size_t count,
- const art::RootInfo& info ATTRIBUTE_UNUSED)
- override {
+ [[maybe_unused]] const art::RootInfo& info) override {
for (size_t i = 0; i != count; ++i) {
if (*roots[i] == input_) {
*roots[i] = output_;
@@ -398,8 +397,8 @@
void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
size_t count,
- const art::RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ [[maybe_unused]] const art::RootInfo& info) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
for (size_t i = 0; i != count; ++i) {
if (roots[i]->AsMirrorPtr() == input_) {
roots[i]->Assign(output_);
@@ -476,7 +475,7 @@
void operator()(art::mirror::Object* src,
art::MemberOffset field_offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(art::Locks::mutator_lock_) {
art::mirror::HeapReference<art::mirror::Object>* trg =
src->GetFieldObjectReferenceAddr(field_offset);
@@ -487,7 +486,7 @@
}
}
- void operator()(art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
+ void operator()([[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
art::ObjPtr<art::mirror::Reference> reference) const
REQUIRES_SHARED(art::Locks::mutator_lock_) {
art::mirror::Object* val = reference->GetReferent();
@@ -496,13 +495,13 @@
}
}
- void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
- const {
+ void VisitRoot(
+ [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const {
LOG(FATAL) << "Unreachable";
}
void VisitRootIfNonNull(
- art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const {
LOG(FATAL) << "Unreachable";
}
@@ -623,7 +622,7 @@
if (art::kIsDebugBuild) {
size_t count = 0;
- for (auto& m ATTRIBUTE_UNUSED : klass->GetDeclaredMethods(art::kRuntimePointerSize)) {
+ for ([[maybe_unused]] auto& m : klass->GetDeclaredMethods(art::kRuntimePointerSize)) {
count++;
}
CHECK_EQ(count, klass->NumDirectMethods() + klass->NumDeclaredVirtualMethods());
@@ -747,7 +746,7 @@
return ERR(NONE);
}
-jvmtiError ClassUtil::GetClassStatus(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ClassUtil::GetClassStatus([[maybe_unused]] jvmtiEnv* env,
jclass jklass,
jint* status_ptr) {
art::ScopedObjectAccess soa(art::Thread::Current());
@@ -798,7 +797,7 @@
return ERR(NONE);
}
-jvmtiError ClassUtil::IsInterface(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ClassUtil::IsInterface([[maybe_unused]] jvmtiEnv* env,
jclass jklass,
jboolean* is_interface_ptr) {
auto test = [](art::ObjPtr<art::mirror::Class> klass) REQUIRES_SHARED(art::Locks::mutator_lock_) {
@@ -807,7 +806,7 @@
return ClassIsT(jklass, test, is_interface_ptr);
}
-jvmtiError ClassUtil::IsArrayClass(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ClassUtil::IsArrayClass([[maybe_unused]] jvmtiEnv* env,
jclass jklass,
jboolean* is_array_class_ptr) {
auto test = [](art::ObjPtr<art::mirror::Class> klass) REQUIRES_SHARED(art::Locks::mutator_lock_) {
@@ -834,7 +833,7 @@
return art::mirror::Class::GetInnerClassFlags(h_klass, modifiers);
}
-jvmtiError ClassUtil::GetClassModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ClassUtil::GetClassModifiers([[maybe_unused]] jvmtiEnv* env,
jclass jklass,
jint* modifiers_ptr) {
art::ScopedObjectAccess soa(art::Thread::Current());
@@ -852,7 +851,7 @@
return ERR(NONE);
}
-jvmtiError ClassUtil::GetClassLoader(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ClassUtil::GetClassLoader([[maybe_unused]] jvmtiEnv* env,
jclass jklass,
jobject* classloader_ptr) {
art::ScopedObjectAccess soa(art::Thread::Current());
@@ -1047,7 +1046,7 @@
return ERR(NONE);
}
-jvmtiError ClassUtil::GetClassVersionNumbers(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ClassUtil::GetClassVersionNumbers([[maybe_unused]] jvmtiEnv* env,
jclass jklass,
jint* minor_version_ptr,
jint* major_version_ptr) {
diff --git a/openjdkjvmti/ti_field.cc b/openjdkjvmti/ti_field.cc
index d4c0ec8..4e39e22 100644
--- a/openjdkjvmti/ti_field.cc
+++ b/openjdkjvmti/ti_field.cc
@@ -200,7 +200,7 @@
return ERR(NONE);
}
-jvmtiError FieldUtil::GetFieldDeclaringClass(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError FieldUtil::GetFieldDeclaringClass([[maybe_unused]] jvmtiEnv* env,
jclass klass,
jfieldID field,
jclass* declaring_class_ptr) {
@@ -223,7 +223,7 @@
return ERR(NONE);
}
-jvmtiError FieldUtil::GetFieldModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError FieldUtil::GetFieldModifiers([[maybe_unused]] jvmtiEnv* env,
jclass klass,
jfieldID field,
jint* modifiers_ptr) {
@@ -246,7 +246,7 @@
return ERR(NONE);
}
-jvmtiError FieldUtil::IsFieldSynthetic(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError FieldUtil::IsFieldSynthetic([[maybe_unused]] jvmtiEnv* env,
jclass klass,
jfieldID field,
jboolean* is_synthetic_ptr) {
diff --git a/openjdkjvmti/ti_heap.cc b/openjdkjvmti/ti_heap.cc
index 01864cd..4be7922 100644
--- a/openjdkjvmti/ti_heap.cc
+++ b/openjdkjvmti/ti_heap.cc
@@ -211,11 +211,11 @@
}
template <typename UserData>
-bool VisitorFalse(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
- art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
- art::ArtField& field ATTRIBUTE_UNUSED,
- size_t field_index ATTRIBUTE_UNUSED,
- UserData* user_data ATTRIBUTE_UNUSED) {
+bool VisitorFalse([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj,
+ [[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
+ [[maybe_unused]] art::ArtField& field,
+ [[maybe_unused]] size_t field_index,
+ [[maybe_unused]] UserData* user_data) {
return false;
}
@@ -476,11 +476,11 @@
// Debug helper. Prints the structure of an object.
template <bool kStatic, bool kRef>
struct DumpVisitor {
- static bool Callback(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
- art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
+ static bool Callback([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj,
+ [[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
art::ArtField& field,
size_t field_index,
- void* user_data ATTRIBUTE_UNUSED)
+ [[maybe_unused]] void* user_data)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
LOG(ERROR) << (kStatic ? "static " : "instance ")
<< (kRef ? "ref " : "primitive ")
@@ -490,8 +490,7 @@
return false;
}
};
-ATTRIBUTE_UNUSED
-void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
+[[maybe_unused]] void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
if (obj->IsClass()) {
FieldVisitor<void, false>:: ReportFields(obj,
@@ -825,14 +824,13 @@
jclass klass,
const jvmtiHeapCallbacks* callbacks,
const void* user_data) {
- auto JvmtiIterateHeap = [](art::mirror::Object* obj ATTRIBUTE_UNUSED,
+ auto JvmtiIterateHeap = []([[maybe_unused]] art::mirror::Object* obj,
const jvmtiHeapCallbacks* cb_callbacks,
jlong class_tag,
jlong size,
jlong* tag,
jint length,
- void* cb_user_data)
- REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ void* cb_user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
return cb_callbacks->heap_iteration_callback(class_tag,
size,
tag,
@@ -1108,31 +1106,33 @@
}
// All instance fields.
- auto report_instance_field = [&](art::ObjPtr<art::mirror::Object> src,
- art::ObjPtr<art::mirror::Class> obj_klass ATTRIBUTE_UNUSED,
- art::ArtField& field,
- size_t field_index,
- void* user_data ATTRIBUTE_UNUSED)
- REQUIRES_SHARED(art::Locks::mutator_lock_)
- REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
- art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src);
- if (field_value != nullptr) {
- jvmtiHeapReferenceInfo reference_info;
- memset(&reference_info, 0, sizeof(reference_info));
+ auto report_instance_field =
+ [&](art::ObjPtr<art::mirror::Object> src,
+ [[maybe_unused]] art::ObjPtr<art::mirror::Class> obj_klass,
+ art::ArtField& field,
+ size_t field_index,
+ [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_)
+ REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
+ art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src);
+ if (field_value != nullptr) {
+ jvmtiHeapReferenceInfo reference_info;
+ memset(&reference_info, 0, sizeof(reference_info));
- reference_info.field.index = field_index;
+ reference_info.field.index = field_index;
- jvmtiHeapReferenceKind kind =
- field.GetOffset().Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
- ? JVMTI_HEAP_REFERENCE_CLASS
- : JVMTI_HEAP_REFERENCE_FIELD;
- const jvmtiHeapReferenceInfo* reference_info_ptr =
- kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;
+ jvmtiHeapReferenceKind kind =
+ field.GetOffset().Int32Value() ==
+ art::mirror::Object::ClassOffset().Int32Value() ?
+ JVMTI_HEAP_REFERENCE_CLASS :
+ JVMTI_HEAP_REFERENCE_FIELD;
+ const jvmtiHeapReferenceInfo* reference_info_ptr =
+ kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;
- return !ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src.Ptr(), field_value.Ptr());
- }
- return false;
- };
+ return !ReportReferenceMaybeEnqueue(
+ kind, reference_info_ptr, src.Ptr(), field_value.Ptr());
+ }
+ return false;
+ };
stop_reports_ = FieldVisitor<void, true>::ReportFields(obj,
nullptr,
VisitorFalse<void>,
@@ -1241,27 +1241,27 @@
DCHECK_EQ(h_klass.Get(), klass);
// Declared static fields.
- auto report_static_field = [&](art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
- art::ObjPtr<art::mirror::Class> obj_klass,
- art::ArtField& field,
- size_t field_index,
- void* user_data ATTRIBUTE_UNUSED)
- REQUIRES_SHARED(art::Locks::mutator_lock_)
- REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
- art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass);
- if (field_value != nullptr) {
- jvmtiHeapReferenceInfo reference_info;
- memset(&reference_info, 0, sizeof(reference_info));
+ auto report_static_field =
+ [&]([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj,
+ art::ObjPtr<art::mirror::Class> obj_klass,
+ art::ArtField& field,
+ size_t field_index,
+ [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_)
+ REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
+ art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass);
+ if (field_value != nullptr) {
+ jvmtiHeapReferenceInfo reference_info;
+ memset(&reference_info, 0, sizeof(reference_info));
- reference_info.field.index = static_cast<jint>(field_index);
+ reference_info.field.index = static_cast<jint>(field_index);
- return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
- &reference_info,
- obj_klass.Ptr(),
- field_value.Ptr());
- }
- return false;
- };
+ return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
+ &reference_info,
+ obj_klass.Ptr(),
+ field_value.Ptr());
+ }
+ return false;
+ };
stop_reports_ = FieldVisitor<void, false>::ReportFields(klass,
nullptr,
VisitorFalse<void>,
@@ -1473,7 +1473,7 @@
return ERR(NONE);
}
-jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
+jvmtiError HeapUtil::ForceGarbageCollection([[maybe_unused]] jvmtiEnv* env) {
art::Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references= */ false);
return ERR(NONE);
@@ -1666,7 +1666,7 @@
}
// java.lang.ref.Reference visitor.
- void operator()(art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
+ void operator()([[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
art::ObjPtr<art::mirror::Reference> ref) const
REQUIRES_SHARED(art::Locks::mutator_lock_) {
operator()(ref, art::mirror::Reference::ReferentOffset(), /* is_static */ false);
diff --git a/openjdkjvmti/ti_jni.cc b/openjdkjvmti/ti_jni.cc
index b655d6a..98d4ec7 100644
--- a/openjdkjvmti/ti_jni.cc
+++ b/openjdkjvmti/ti_jni.cc
@@ -42,7 +42,7 @@
namespace openjdkjvmti {
-jvmtiError JNIUtil::SetJNIFunctionTable(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError JNIUtil::SetJNIFunctionTable([[maybe_unused]] jvmtiEnv* env,
const jniNativeInterface* function_table) {
// While we supporting setting null (which will reset the table), the spec says no.
if (function_table == nullptr) {
diff --git a/openjdkjvmti/ti_logging.cc b/openjdkjvmti/ti_logging.cc
index 8740ec6..82057b1 100644
--- a/openjdkjvmti/ti_logging.cc
+++ b/openjdkjvmti/ti_logging.cc
@@ -100,7 +100,7 @@
return OK;
}
-jvmtiError LogUtil::SetVerboseFlag(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError LogUtil::SetVerboseFlag([[maybe_unused]] jvmtiEnv* env,
jvmtiVerboseFlag flag,
jboolean value) {
if (flag == jvmtiVerboseFlag::JVMTI_VERBOSE_OTHER) {
diff --git a/openjdkjvmti/ti_method.cc b/openjdkjvmti/ti_method.cc
index 99a5d9c..dc3f6dc 100644
--- a/openjdkjvmti/ti_method.cc
+++ b/openjdkjvmti/ti_method.cc
@@ -162,7 +162,7 @@
return OK;
}
-jvmtiError MethodUtil::GetArgumentsSize(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MethodUtil::GetArgumentsSize([[maybe_unused]] jvmtiEnv* env,
jmethodID method,
jint* size_ptr) {
if (method == nullptr) {
@@ -284,7 +284,7 @@
return release(entry_count_ptr, table_ptr);
}
-jvmtiError MethodUtil::GetMaxLocals(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MethodUtil::GetMaxLocals([[maybe_unused]] jvmtiEnv* env,
jmethodID method,
jint* max_ptr) {
if (method == nullptr) {
@@ -380,7 +380,7 @@
return ERR(NONE);
}
-jvmtiError MethodUtil::GetMethodDeclaringClass(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MethodUtil::GetMethodDeclaringClass([[maybe_unused]] jvmtiEnv* env,
jmethodID method,
jclass* declaring_class_ptr) {
if (declaring_class_ptr == nullptr) {
@@ -397,7 +397,7 @@
return ERR(NONE);
}
-jvmtiError MethodUtil::GetMethodLocation(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MethodUtil::GetMethodLocation([[maybe_unused]] jvmtiEnv* env,
jmethodID method,
jlocation* start_location_ptr,
jlocation* end_location_ptr) {
@@ -430,7 +430,7 @@
return ERR(NONE);
}
-jvmtiError MethodUtil::GetMethodModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MethodUtil::GetMethodModifiers([[maybe_unused]] jvmtiEnv* env,
jmethodID method,
jint* modifiers_ptr) {
if (modifiers_ptr == nullptr) {
@@ -507,7 +507,7 @@
}
template <typename T>
-static jvmtiError IsMethodT(jvmtiEnv* env ATTRIBUTE_UNUSED,
+static jvmtiError IsMethodT([[maybe_unused]] jvmtiEnv* env,
jmethodID method,
T test,
jboolean* is_t_ptr) {
@@ -833,9 +833,9 @@
return res;
}
- jvmtiError GetTypeErrorInner(art::ArtMethod* method ATTRIBUTE_UNUSED,
+ jvmtiError GetTypeErrorInner([[maybe_unused]] art::ArtMethod* method,
SlotType slot_type,
- const std::string& descriptor ATTRIBUTE_UNUSED)
+ [[maybe_unused]] const std::string& descriptor)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
switch (type_) {
case art::Primitive::kPrimFloat:
@@ -1177,7 +1177,7 @@
art::GcRoot<art::mirror::Object> val_;
};
-jvmtiError MethodUtil::GetLocalInstance(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MethodUtil::GetLocalInstance([[maybe_unused]] jvmtiEnv* env,
jthread thread,
jint depth,
jobject* data) {
diff --git a/openjdkjvmti/ti_monitor.cc b/openjdkjvmti/ti_monitor.cc
index f244cc1..469693d 100644
--- a/openjdkjvmti/ti_monitor.cc
+++ b/openjdkjvmti/ti_monitor.cc
@@ -225,7 +225,7 @@
return reinterpret_cast<JvmtiMonitor*>(id);
}
-jvmtiError MonitorUtil::CreateRawMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MonitorUtil::CreateRawMonitor([[maybe_unused]] jvmtiEnv* env,
const char* name,
jrawMonitorID* monitor_ptr) {
if (name == nullptr || monitor_ptr == nullptr) {
@@ -238,7 +238,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::DestroyRawMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) {
+jvmtiError MonitorUtil::DestroyRawMonitor([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) {
if (id == nullptr) {
return ERR(INVALID_MONITOR);
}
@@ -253,7 +253,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::RawMonitorEnterNoSuspend(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) {
+jvmtiError MonitorUtil::RawMonitorEnterNoSuspend([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) {
if (id == nullptr) {
return ERR(INVALID_MONITOR);
}
@@ -266,7 +266,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::RawMonitorEnter(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) {
+jvmtiError MonitorUtil::RawMonitorEnter([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) {
if (id == nullptr) {
return ERR(INVALID_MONITOR);
}
@@ -279,7 +279,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::RawMonitorExit(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) {
+jvmtiError MonitorUtil::RawMonitorExit([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) {
if (id == nullptr) {
return ERR(INVALID_MONITOR);
}
@@ -294,7 +294,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::RawMonitorWait(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MonitorUtil::RawMonitorWait([[maybe_unused]] jvmtiEnv* env,
jrawMonitorID id,
jlong millis) {
if (id == nullptr) {
@@ -322,7 +322,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::RawMonitorNotify(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) {
+jvmtiError MonitorUtil::RawMonitorNotify([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) {
if (id == nullptr) {
return ERR(INVALID_MONITOR);
}
@@ -337,7 +337,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::RawMonitorNotifyAll(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) {
+jvmtiError MonitorUtil::RawMonitorNotifyAll([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) {
if (id == nullptr) {
return ERR(INVALID_MONITOR);
}
@@ -352,7 +352,7 @@
return ERR(NONE);
}
-jvmtiError MonitorUtil::GetCurrentContendedMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError MonitorUtil::GetCurrentContendedMonitor([[maybe_unused]] jvmtiEnv* env,
jthread thread,
jobject* monitor) {
if (monitor == nullptr) {
diff --git a/openjdkjvmti/ti_object.cc b/openjdkjvmti/ti_object.cc
index eb1140d..f37df86 100644
--- a/openjdkjvmti/ti_object.cc
+++ b/openjdkjvmti/ti_object.cc
@@ -40,7 +40,7 @@
namespace openjdkjvmti {
-jvmtiError ObjectUtil::GetObjectSize(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ObjectUtil::GetObjectSize([[maybe_unused]] jvmtiEnv* env,
jobject jobject,
jlong* size_ptr) {
if (jobject == nullptr) {
@@ -57,7 +57,7 @@
return ERR(NONE);
}
-jvmtiError ObjectUtil::GetObjectHashCode(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ObjectUtil::GetObjectHashCode([[maybe_unused]] jvmtiEnv* env,
jobject jobject,
jint* hash_code_ptr) {
if (jobject == nullptr) {
diff --git a/openjdkjvmti/ti_phase.cc b/openjdkjvmti/ti_phase.cc
index 4fa97f1..89bf1aa 100644
--- a/openjdkjvmti/ti_phase.cc
+++ b/openjdkjvmti/ti_phase.cc
@@ -97,7 +97,7 @@
PhaseUtil::PhaseCallback gPhaseCallback;
-jvmtiError PhaseUtil::GetPhase(jvmtiEnv* env ATTRIBUTE_UNUSED, jvmtiPhase* phase_ptr) {
+jvmtiError PhaseUtil::GetPhase([[maybe_unused]] jvmtiEnv* env, jvmtiPhase* phase_ptr) {
if (phase_ptr == nullptr) {
return ERR(NULL_POINTER);
}
diff --git a/openjdkjvmti/ti_properties.cc b/openjdkjvmti/ti_properties.cc
index 4fb3070..c6490c3 100644
--- a/openjdkjvmti/ti_properties.cc
+++ b/openjdkjvmti/ti_properties.cc
@@ -226,9 +226,9 @@
return ERR(NOT_AVAILABLE);
}
-jvmtiError PropertiesUtil::SetSystemProperty(jvmtiEnv* env ATTRIBUTE_UNUSED,
- const char* property ATTRIBUTE_UNUSED,
- const char* value ATTRIBUTE_UNUSED) {
+jvmtiError PropertiesUtil::SetSystemProperty([[maybe_unused]] jvmtiEnv* env,
+ [[maybe_unused]] const char* property,
+ [[maybe_unused]] const char* value) {
// We do not allow manipulation of any property here.
return ERR(NOT_AVAILABLE);
}
diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc
index aafca47..dfcbeb4 100644
--- a/openjdkjvmti/ti_redefine.cc
+++ b/openjdkjvmti/ti_redefine.cc
@@ -2372,9 +2372,9 @@
}
}
- void ClassLoad(art::Handle<art::mirror::Class> klass ATTRIBUTE_UNUSED) override {}
- void ClassPrepare(art::Handle<art::mirror::Class> klass1 ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Class> klass2 ATTRIBUTE_UNUSED) override {}
+ void ClassLoad([[maybe_unused]] art::Handle<art::mirror::Class> klass) override {}
+ void ClassPrepare([[maybe_unused]] art::Handle<art::mirror::Class> klass1,
+ [[maybe_unused]] art::Handle<art::mirror::Class> klass2) override {}
void SetRunning() {
is_running_ = true;
diff --git a/openjdkjvmti/ti_stack.cc b/openjdkjvmti/ti_stack.cc
index 8ee4adb..9af8861 100644
--- a/openjdkjvmti/ti_stack.cc
+++ b/openjdkjvmti/ti_stack.cc
@@ -716,7 +716,7 @@
size_t count;
};
-jvmtiError StackUtil::GetFrameCount(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError StackUtil::GetFrameCount([[maybe_unused]] jvmtiEnv* env,
jthread java_thread,
jint* count_ptr) {
// It is not great that we have to hold these locks for so long, but it is necessary to ensure
@@ -784,7 +784,7 @@
uint32_t dex_pc;
};
-jvmtiError StackUtil::GetFrameLocation(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError StackUtil::GetFrameLocation([[maybe_unused]] jvmtiEnv* env,
jthread java_thread,
jint depth,
jmethodID* method_ptr,
@@ -877,8 +877,8 @@
visitor->stack_depths.push_back(visitor->current_stack_depth);
}
- void VisitRoot(art::mirror::Object* obj, const art::RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ void VisitRoot(art::mirror::Object* obj, [[maybe_unused]] const art::RootInfo& info) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
for (const art::Handle<art::mirror::Object>& m : monitors) {
if (m.Get() == obj) {
return;
@@ -1219,7 +1219,7 @@
template <>
bool NonStandardExitFrames<NonStandardExitType::kForceReturn>::CheckFunctions(
- jvmtiEnv* env, art::ArtMethod* calling ATTRIBUTE_UNUSED, art::ArtMethod* called) {
+ jvmtiEnv* env, [[maybe_unused]] art::ArtMethod* calling, art::ArtMethod* called) {
if (UNLIKELY(called->IsNative())) {
result_ = ERR(OPAQUE_FRAME);
JVMTI_LOG(INFO, env) << "Cannot force early return from " << called->PrettyMethod()
@@ -1297,7 +1297,7 @@
template <>
void AddDelayedMethodExitEvent<std::nullptr_t>(EventHandler* handler,
art::ShadowFrame* frame,
- std::nullptr_t null_val ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] std::nullptr_t null_val) {
jvalue jval;
memset(&jval, 0, sizeof(jval));
handler->AddDelayedNonStandardExitEvent(frame, false, jval);
@@ -1316,13 +1316,13 @@
REQUIRES_SHARED(art::Locks::mutator_lock_)
REQUIRES(art::Locks::user_code_suspension_lock_, art::Locks::thread_list_lock_);
-#define SIMPLE_VALID_RETURN_TYPE(type, ...) \
- template <> \
- bool ValidReturnType<type>(art::Thread * self ATTRIBUTE_UNUSED, \
- art::ObjPtr<art::mirror::Class> return_type, \
- type value ATTRIBUTE_UNUSED) { \
- static constexpr std::initializer_list<art::Primitive::Type> types{ __VA_ARGS__ }; \
- return std::find(types.begin(), types.end(), return_type->GetPrimitiveType()) != types.end(); \
+#define SIMPLE_VALID_RETURN_TYPE(type, ...) \
+ template <> \
+ bool ValidReturnType<type>([[maybe_unused]] art::Thread * self, \
+ art::ObjPtr<art::mirror::Class> return_type, \
+ [[maybe_unused]] type value) { \
+ static constexpr std::initializer_list<art::Primitive::Type> types{__VA_ARGS__}; \
+ return std::find(types.begin(), types.end(), return_type->GetPrimitiveType()) != types.end(); \
}
SIMPLE_VALID_RETURN_TYPE(jlong, art::Primitive::kPrimLong);
diff --git a/openjdkjvmti/ti_thread.cc b/openjdkjvmti/ti_thread.cc
index b5bc35e..13eebbf 100644
--- a/openjdkjvmti/ti_thread.cc
+++ b/openjdkjvmti/ti_thread.cc
@@ -205,7 +205,7 @@
runtime->GetRuntimeCallbacks()->RemoveThreadLifecycleCallback(&gThreadCallback);
}
-jvmtiError ThreadUtil::GetCurrentThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread* thread_ptr) {
+jvmtiError ThreadUtil::GetCurrentThread([[maybe_unused]] jvmtiEnv* env, jthread* thread_ptr) {
art::Thread* self = art::Thread::Current();
art::ScopedObjectAccess soa(self);
@@ -564,7 +564,7 @@
return WouldSuspendForUserCodeLocked(self);
}
-jvmtiError ThreadUtil::GetThreadState(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ThreadUtil::GetThreadState([[maybe_unused]] jvmtiEnv* env,
jthread thread,
jint* thread_state_ptr) {
if (thread_state_ptr == nullptr) {
@@ -940,7 +940,7 @@
return OK;
}
-jvmtiError ThreadUtil::SuspendThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread thread) {
+jvmtiError ThreadUtil::SuspendThread([[maybe_unused]] jvmtiEnv* env, jthread thread) {
art::Thread* self = art::Thread::Current();
bool target_is_self = false;
{
@@ -961,8 +961,7 @@
}
}
-jvmtiError ThreadUtil::ResumeThread(jvmtiEnv* env ATTRIBUTE_UNUSED,
- jthread thread) {
+jvmtiError ThreadUtil::ResumeThread([[maybe_unused]] jvmtiEnv* env, jthread thread) {
if (thread == nullptr) {
return ERR(NULL_POINTER);
}
@@ -1079,7 +1078,7 @@
return OK;
}
-jvmtiError ThreadUtil::StopThread(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError ThreadUtil::StopThread([[maybe_unused]] jvmtiEnv* env,
jthread thread,
jobject exception) {
art::Thread* self = art::Thread::Current();
@@ -1128,7 +1127,7 @@
}
}
-jvmtiError ThreadUtil::InterruptThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread thread) {
+jvmtiError ThreadUtil::InterruptThread([[maybe_unused]] jvmtiEnv* env, jthread thread) {
art::Thread* self = art::Thread::Current();
art::ScopedObjectAccess soa(self);
art::MutexLock tll_mu(self, *art::Locks::thread_list_lock_);
diff --git a/openjdkjvmti/ti_timers.cc b/openjdkjvmti/ti_timers.cc
index 11b58c4..f02501f 100644
--- a/openjdkjvmti/ti_timers.cc
+++ b/openjdkjvmti/ti_timers.cc
@@ -45,7 +45,7 @@
namespace openjdkjvmti {
-jvmtiError TimerUtil::GetAvailableProcessors(jvmtiEnv* env ATTRIBUTE_UNUSED,
+jvmtiError TimerUtil::GetAvailableProcessors([[maybe_unused]] jvmtiEnv* env,
jint* processor_count_ptr) {
if (processor_count_ptr == nullptr) {
return ERR(NULL_POINTER);
@@ -56,7 +56,7 @@
return ERR(NONE);
}
-jvmtiError TimerUtil::GetTimerInfo(jvmtiEnv* env ATTRIBUTE_UNUSED, jvmtiTimerInfo* info_ptr) {
+jvmtiError TimerUtil::GetTimerInfo([[maybe_unused]] jvmtiEnv* env, jvmtiTimerInfo* info_ptr) {
if (info_ptr == nullptr) {
return ERR(NULL_POINTER);
}
@@ -69,7 +69,7 @@
return ERR(NONE);
}
-jvmtiError TimerUtil::GetTime(jvmtiEnv* env ATTRIBUTE_UNUSED, jlong* nanos_ptr) {
+jvmtiError TimerUtil::GetTime([[maybe_unused]] jvmtiEnv* env, jlong* nanos_ptr) {
if (nanos_ptr == nullptr) {
return ERR(NULL_POINTER);
}
diff --git a/perfetto_hprof/perfetto_hprof.cc b/perfetto_hprof/perfetto_hprof.cc
index 906362a..a6ad4a4 100644
--- a/perfetto_hprof/perfetto_hprof.cc
+++ b/perfetto_hprof/perfetto_hprof.cc
@@ -438,10 +438,10 @@
referred_objects_->emplace_back(std::move(field_name), ref);
}
- void VisitRootIfNonNull(art::mirror::CompressedReference<art::mirror::Object>* root
- ATTRIBUTE_UNUSED) const {}
- void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root
- ATTRIBUTE_UNUSED) const {}
+ void VisitRootIfNonNull(
+ [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const {}
+ void VisitRoot(
+ [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const {}
private:
// We can use a raw Object* pointer here, because there are no concurrent GC threads after the
diff --git a/runtime/aot_class_linker.h b/runtime/aot_class_linker.h
index 30a19c8..be4ab2b 100644
--- a/runtime/aot_class_linker.h
+++ b/runtime/aot_class_linker.h
@@ -39,11 +39,11 @@
void SetSdkChecker(std::unique_ptr<SdkChecker>&& sdk_checker_);
const SdkChecker* GetSdkChecker() const;
- bool DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const override
+ bool DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const override
REQUIRES_SHARED(Locks::mutator_lock_);
- bool DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const override
+ bool DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const override
REQUIRES_SHARED(Locks::mutator_lock_);
- bool DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const override;
+ bool DenyAccessBasedOnPublicSdk([[maybe_unused]] const char* type_descriptor) const override;
void SetEnablePublicSdkChecks(bool enabled) override;
protected:
diff --git a/runtime/arch/arm/fault_handler_arm.cc b/runtime/arch/arm/fault_handler_arm.cc
index f02ec65..bf3eaa7 100644
--- a/runtime/arch/arm/fault_handler_arm.cc
+++ b/runtime/arch/arm/fault_handler_arm.cc
@@ -45,7 +45,7 @@
return instr_size;
}
-uintptr_t FaultManager::GetFaultPc(siginfo_t* siginfo ATTRIBUTE_UNUSED, void* context) {
+uintptr_t FaultManager::GetFaultPc([[maybe_unused]] siginfo_t* siginfo, void* context) {
ucontext_t* uc = reinterpret_cast<ucontext_t*>(context);
mcontext_t* mc = reinterpret_cast<mcontext_t*>(&uc->uc_mcontext);
if (mc->arm_sp == 0) {
@@ -61,7 +61,7 @@
return mc->arm_sp;
}
-bool NullPointerHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info, void* context) {
+bool NullPointerHandler::Action([[maybe_unused]] int sig, siginfo_t* info, void* context) {
uintptr_t fault_address = reinterpret_cast<uintptr_t>(info->si_addr);
if (!IsValidFaultAddress(fault_address)) {
return false;
@@ -115,7 +115,8 @@
// The offset from r9 is Thread::ThreadSuspendTriggerOffset().
// To check for a suspend check, we examine the instructions that caused
// the fault (at PC-4 and PC).
-bool SuspensionHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED,
+bool SuspensionHandler::Action([[maybe_unused]] int sig,
+ [[maybe_unused]] siginfo_t* info,
void* context) {
// These are the instructions to check for. The first one is the ldr r0,[r9,#xxx]
// where xxx is the offset of the suspend trigger.
@@ -186,7 +187,8 @@
// If we determine this is a stack overflow we need to move the stack pointer
// to the overflow region below the protected region.
-bool StackOverflowHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED,
+bool StackOverflowHandler::Action([[maybe_unused]] int sig,
+ [[maybe_unused]] siginfo_t* info,
void* context) {
ucontext_t* uc = reinterpret_cast<ucontext_t*>(context);
mcontext_t* mc = reinterpret_cast<mcontext_t*>(&uc->uc_mcontext);
diff --git a/runtime/arch/arm/instruction_set_features_arm.cc b/runtime/arch/arm/instruction_set_features_arm.cc
index 749476b..3309523 100644
--- a/runtime/arch/arm/instruction_set_features_arm.cc
+++ b/runtime/arch/arm/instruction_set_features_arm.cc
@@ -243,9 +243,9 @@
// A signal handler called by a fault for an illegal instruction. We record the fact in r0
// and then increment the PC in the signal context to return to the next instruction. We know the
// instruction is 4 bytes long.
-static void bad_instr_handle(int signo ATTRIBUTE_UNUSED,
- siginfo_t* si ATTRIBUTE_UNUSED,
- void* data) {
+static void bad_instr_handle([[maybe_unused]] int signo,
+ [[maybe_unused]] siginfo_t* si,
+ void* data) {
#if defined(__arm__)
ucontext_t* uc = reinterpret_cast<ucontext_t*>(data);
mcontext_t* mc = &uc->uc_mcontext;
diff --git a/runtime/arch/arm64/fault_handler_arm64.cc b/runtime/arch/arm64/fault_handler_arm64.cc
index 3878b57..cebff9b 100644
--- a/runtime/arch/arm64/fault_handler_arm64.cc
+++ b/runtime/arch/arm64/fault_handler_arm64.cc
@@ -62,7 +62,7 @@
return mc->sp;
}
-bool NullPointerHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info, void* context) {
+bool NullPointerHandler::Action([[maybe_unused]] int sig, siginfo_t* info, void* context) {
uintptr_t fault_address = reinterpret_cast<uintptr_t>(info->si_addr);
if (!IsValidFaultAddress(fault_address)) {
return false;
@@ -96,7 +96,8 @@
// A suspend check is done using the following instruction:
// 0x...: f94002b5 ldr x21, [x21, #0]
// To check for a suspend check, we examine the instruction that caused the fault (at PC).
-bool SuspensionHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED,
+bool SuspensionHandler::Action([[maybe_unused]] int sig,
+ [[maybe_unused]] siginfo_t* info,
void* context) {
constexpr uint32_t kSuspendCheckRegister = 21;
constexpr uint32_t checkinst =
@@ -128,7 +129,8 @@
return true;
}
-bool StackOverflowHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED,
+bool StackOverflowHandler::Action([[maybe_unused]] int sig,
+ [[maybe_unused]] siginfo_t* info,
void* context) {
ucontext_t* uc = reinterpret_cast<ucontext_t*>(context);
mcontext_t* mc = reinterpret_cast<mcontext_t*>(&uc->uc_mcontext);
diff --git a/runtime/arch/context.h b/runtime/arch/context.h
index be7adc7..efeacd6 100644
--- a/runtime/arch/context.h
+++ b/runtime/arch/context.h
@@ -90,9 +90,7 @@
// Set `new_value` to the physical register containing the dex PC pointer in
// an nterp frame.
- virtual void SetNterpDexPC(uintptr_t new_value ATTRIBUTE_UNUSED) {
- abort();
- }
+ virtual void SetNterpDexPC([[maybe_unused]] uintptr_t new_value) { abort(); }
// Switches execution of the executing context to this context
NO_RETURN virtual void DoLongJump() = 0;
diff --git a/runtime/arch/instruction_set_features.cc b/runtime/arch/instruction_set_features.cc
index d88c544..17b9dc3 100644
--- a/runtime/arch/instruction_set_features.cc
+++ b/runtime/arch/instruction_set_features.cc
@@ -313,7 +313,7 @@
}
std::unique_ptr<const InstructionSetFeatures> InstructionSetFeatures::AddRuntimeDetectedFeatures(
- const InstructionSetFeatures *features ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] const InstructionSetFeatures* features) const {
UNIMPLEMENTED(FATAL) << kRuntimeISA;
UNREACHABLE();
}
diff --git a/runtime/arch/instruction_set_features.h b/runtime/arch/instruction_set_features.h
index 1f41b39..1cb0fbb 100644
--- a/runtime/arch/instruction_set_features.h
+++ b/runtime/arch/instruction_set_features.h
@@ -146,8 +146,8 @@
std::string* error_msg) const = 0;
// Add run-time detected architecture specific features in sub-classes.
- virtual std::unique_ptr<const InstructionSetFeatures>
- AddRuntimeDetectedFeatures(const InstructionSetFeatures *features ATTRIBUTE_UNUSED) const;
+ virtual std::unique_ptr<const InstructionSetFeatures> AddRuntimeDetectedFeatures(
+ [[maybe_unused]] const InstructionSetFeatures* features) const;
private:
DISALLOW_COPY_AND_ASSIGN(InstructionSetFeatures);
diff --git a/runtime/arch/riscv64/instruction_set_features_riscv64.cc b/runtime/arch/riscv64/instruction_set_features_riscv64.cc
index 2ef4f84..544b717 100644
--- a/runtime/arch/riscv64/instruction_set_features_riscv64.cc
+++ b/runtime/arch/riscv64/instruction_set_features_riscv64.cc
@@ -30,7 +30,7 @@
}
Riscv64FeaturesUniquePtr Riscv64InstructionSetFeatures::FromVariant(
- const std::string& variant, std::string* error_msg ATTRIBUTE_UNUSED) {
+ const std::string& variant, [[maybe_unused]] std::string* error_msg) {
if (variant != "generic") {
LOG(WARNING) << "Unexpected CPU variant for Riscv64 using defaults: " << variant;
}
@@ -90,8 +90,8 @@
std::unique_ptr<const InstructionSetFeatures>
Riscv64InstructionSetFeatures::AddFeaturesFromSplitString(
- const std::vector<std::string>& features ATTRIBUTE_UNUSED,
- std::string* error_msg ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] const std::vector<std::string>& features,
+ [[maybe_unused]] std::string* error_msg) const {
UNIMPLEMENTED(WARNING);
return std::unique_ptr<const InstructionSetFeatures>(new Riscv64InstructionSetFeatures(bits_));
}
diff --git a/runtime/arch/x86/fault_handler_x86.cc b/runtime/arch/x86/fault_handler_x86.cc
index cd2d38f..efc5249 100644
--- a/runtime/arch/x86/fault_handler_x86.cc
+++ b/runtime/arch/x86/fault_handler_x86.cc
@@ -259,7 +259,7 @@
#undef FETCH_OR_SKIP_BYTE
}
-uintptr_t FaultManager::GetFaultPc(siginfo_t* siginfo ATTRIBUTE_UNUSED, void* context) {
+uintptr_t FaultManager::GetFaultPc([[maybe_unused]] siginfo_t* siginfo, void* context) {
ucontext_t* uc = reinterpret_cast<ucontext_t*>(context);
if (uc->CTX_ESP == 0) {
VLOG(signals) << "Missing SP";
diff --git a/runtime/arch/x86/instruction_set_features_x86.cc b/runtime/arch/x86/instruction_set_features_x86.cc
index f11aca9..6976e9c 100644
--- a/runtime/arch/x86/instruction_set_features_x86.cc
+++ b/runtime/arch/x86/instruction_set_features_x86.cc
@@ -119,9 +119,9 @@
}
}
-X86FeaturesUniquePtr X86InstructionSetFeatures::FromVariant(
- const std::string& variant, std::string* error_msg ATTRIBUTE_UNUSED,
- bool x86_64) {
+X86FeaturesUniquePtr X86InstructionSetFeatures::FromVariant(const std::string& variant,
+ [[maybe_unused]] std::string* error_msg,
+ bool x86_64) {
const bool is_runtime_isa =
kRuntimeISA == (x86_64 ? InstructionSet::kX86_64 : InstructionSet::kX86);
if (is_runtime_isa && variant == "default") {
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 3ea5130..7353b14 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -52,7 +52,7 @@
template <> struct ShortyTraits<'V'> {
using Type = void;
- static Type Get(const JValue& value ATTRIBUTE_UNUSED) {}
+ static Type Get([[maybe_unused]] const JValue& value) {}
// `kVRegCount` and `Set()` are not defined.
};
@@ -152,8 +152,8 @@
}
template <char... ArgType>
-inline ALWAYS_INLINE void FillVRegs(uint32_t* vregs ATTRIBUTE_UNUSED,
- typename ShortyTraits<ArgType>::Type... args ATTRIBUTE_UNUSED)
+inline ALWAYS_INLINE void FillVRegs([[maybe_unused]] uint32_t* vregs,
+ [[maybe_unused]] typename ShortyTraits<ArgType>::Type... args)
REQUIRES_SHARED(Locks::mutator_lock_) {}
template <char FirstArgType, char... ArgType>
diff --git a/runtime/base/quasi_atomic.h b/runtime/base/quasi_atomic.h
index 5aa4dde..95d7bb2 100644
--- a/runtime/base/quasi_atomic.h
+++ b/runtime/base/quasi_atomic.h
@@ -46,7 +46,7 @@
// quasiatomic operations that are performed on partially-overlapping
// memory.
class QuasiAtomic {
- static constexpr bool NeedSwapMutexes(InstructionSet isa ATTRIBUTE_UNUSED) {
+ static constexpr bool NeedSwapMutexes([[maybe_unused]] InstructionSet isa) {
// TODO: Remove this function now that mips support has been removed.
return false;
}
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index fca86a5..f3562a4 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1496,16 +1496,14 @@
// Visit Class Fields
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
// References within image or across images don't need a read barrier.
ObjPtr<mirror::Object> referred_obj =
obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
TestObject(referred_obj);
}
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref) const
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
}
@@ -3340,7 +3338,7 @@
return Finish(h_klass);
}
- ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
+ ObjPtr<mirror::Class> Finish([[maybe_unused]] nullptr_t np)
REQUIRES_SHARED(Locks::mutator_lock_) {
ScopedNullHandle<mirror::Class> snh;
return Finish(snh);
@@ -7361,8 +7359,8 @@
class VTableIndexCheckerRelease {
protected:
- explicit VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED) {}
- void CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const {}
+ explicit VTableIndexCheckerRelease([[maybe_unused]] size_t vtable_length) {}
+ void CheckIndex([[maybe_unused]] uint32_t index) const {}
};
using VTableIndexChecker =
@@ -10927,27 +10925,27 @@
Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
}
-bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
+bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Should not be called on ClassLinker, only on AotClassLinker that overrides this.
LOG(FATAL) << "UNREACHABLE";
UNREACHABLE();
}
-bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
+bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Should not be called on ClassLinker, only on AotClassLinker that overrides this.
LOG(FATAL) << "UNREACHABLE";
UNREACHABLE();
}
-bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
+bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] const char* type_descriptor) const {
// Should not be called on ClassLinker, only on AotClassLinker that overrides this.
LOG(FATAL) << "UNREACHABLE";
UNREACHABLE();
}
-void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
+void ClassLinker::SetEnablePublicSdkChecks([[maybe_unused]] bool enabled) {
// Should not be called on ClassLinker, only on AotClassLinker that overrides this.
LOG(FATAL) << "UNREACHABLE";
UNREACHABLE();
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index d14e46a..6fdd94d 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -1414,13 +1414,13 @@
// different object. It is the listener's responsibility to handle this.
// Note: This callback is rarely useful so a default implementation has been given that does
// nothing.
- virtual void ClassPreDefine(const char* descriptor ATTRIBUTE_UNUSED,
- Handle<mirror::Class> klass ATTRIBUTE_UNUSED,
- Handle<mirror::ClassLoader> class_loader ATTRIBUTE_UNUSED,
- const DexFile& initial_dex_file ATTRIBUTE_UNUSED,
- const dex::ClassDef& initial_class_def ATTRIBUTE_UNUSED,
- /*out*/DexFile const** final_dex_file ATTRIBUTE_UNUSED,
- /*out*/dex::ClassDef const** final_class_def ATTRIBUTE_UNUSED)
+ virtual void ClassPreDefine([[maybe_unused]] const char* descriptor,
+ [[maybe_unused]] Handle<mirror::Class> klass,
+ [[maybe_unused]] Handle<mirror::ClassLoader> class_loader,
+ [[maybe_unused]] const DexFile& initial_dex_file,
+ [[maybe_unused]] const dex::ClassDef& initial_class_def,
+ [[maybe_unused]] /*out*/ DexFile const** final_dex_file,
+ [[maybe_unused]] /*out*/ dex::ClassDef const** final_class_def)
REQUIRES_SHARED(Locks::mutator_lock_) {}
// A class has been loaded.
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 95b224f..981f5ea 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -443,7 +443,7 @@
class TestRootVisitor : public SingleRootVisitor {
public:
- void VisitRoot(mirror::Object* root, const RootInfo& info ATTRIBUTE_UNUSED) override {
+ void VisitRoot(mirror::Object* root, [[maybe_unused]] const RootInfo& info) override {
EXPECT_TRUE(root != nullptr);
}
};
diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h
index 85c48a2..0f82c4b 100644
--- a/runtime/common_runtime_test.h
+++ b/runtime/common_runtime_test.h
@@ -136,7 +136,7 @@
protected:
// Allow subclases such as CommonCompilerTest to add extra options.
- virtual void SetUpRuntimeOptions(RuntimeOptions* options ATTRIBUTE_UNUSED) {}
+ virtual void SetUpRuntimeOptions([[maybe_unused]] RuntimeOptions* options) {}
// Called before the runtime is created.
virtual void PreRuntimeCreate() {}
diff --git a/runtime/compiler_callbacks.h b/runtime/compiler_callbacks.h
index f76ee66..3fa2fa3 100644
--- a/runtime/compiler_callbacks.h
+++ b/runtime/compiler_callbacks.h
@@ -52,25 +52,25 @@
virtual void ClassRejected(ClassReference ref) = 0;
virtual verifier::VerifierDeps* GetVerifierDeps() const = 0;
- virtual void SetVerifierDeps(verifier::VerifierDeps* deps ATTRIBUTE_UNUSED) {}
+ virtual void SetVerifierDeps([[maybe_unused]] verifier::VerifierDeps* deps) {}
// Return the class status of a previous stage of the compilation. This can be used, for example,
// when class unloading is enabled during multidex compilation.
- virtual ClassStatus GetPreviousClassState(ClassReference ref ATTRIBUTE_UNUSED) {
+ virtual ClassStatus GetPreviousClassState([[maybe_unused]] ClassReference ref) {
return ClassStatus::kNotReady;
}
- virtual void SetDoesClassUnloading(bool does_class_unloading ATTRIBUTE_UNUSED,
- CompilerDriver* compiler_driver ATTRIBUTE_UNUSED) {}
+ virtual void SetDoesClassUnloading([[maybe_unused]] bool does_class_unloading,
+ [[maybe_unused]] CompilerDriver* compiler_driver) {}
bool IsBootImage() {
return mode_ == CallbackMode::kCompileBootImage;
}
- virtual void UpdateClassState(ClassReference ref ATTRIBUTE_UNUSED,
- ClassStatus state ATTRIBUTE_UNUSED) {}
+ virtual void UpdateClassState([[maybe_unused]] ClassReference ref,
+ [[maybe_unused]] ClassStatus state) {}
- virtual bool CanUseOatStatusForVerification(mirror::Class* klass ATTRIBUTE_UNUSED)
+ virtual bool CanUseOatStatusForVerification([[maybe_unused]] mirror::Class* klass)
REQUIRES_SHARED(Locks::mutator_lock_) {
return false;
}
diff --git a/runtime/elf_file.cc b/runtime/elf_file.cc
index 0b4f1f3..2ceda12 100644
--- a/runtime/elf_file.cc
+++ b/runtime/elf_file.cc
@@ -1076,7 +1076,7 @@
}
static InstructionSet GetInstructionSetFromELF(uint16_t e_machine,
- uint32_t e_flags ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] uint32_t e_flags) {
switch (e_machine) {
case EM_ARM:
return InstructionSet::kArm;
diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc
index e2fc232..fb32c95 100644
--- a/runtime/entrypoints/quick/quick_field_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc
@@ -420,7 +420,7 @@
return ReadBarrier::Mark(obj);
}
-extern "C" mirror::Object* artReadBarrierSlow(mirror::Object* ref ATTRIBUTE_UNUSED,
+extern "C" mirror::Object* artReadBarrierSlow([[maybe_unused]] mirror::Object* ref,
mirror::Object* obj,
uint32_t offset) {
// Used only in connection with non-volatile loads.
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 7e96f29..905cee2 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -1634,9 +1634,8 @@
}
virtual void WalkHeader(
- BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- }
+ [[maybe_unused]] BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
+ REQUIRES_SHARED(Locks::mutator_lock_) {}
void Walk(const char* shorty, uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) {
BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
@@ -2174,10 +2173,8 @@
}
extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
- uint32_t method_idx,
- mirror::Object* this_object ATTRIBUTE_UNUSED,
- Thread* self,
- ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
+ uint32_t method_idx, [[maybe_unused]] mirror::Object* this_object, Thread* self, ArtMethod** sp)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
// For static, this_object is not required and may be random garbage. Don't pass it down so that
// it doesn't cause ObjPtr alignment failure check.
return artInvokeCommon<kStatic>(method_idx, nullptr, self, sp);
diff --git a/runtime/fault_handler.cc b/runtime/fault_handler.cc
index a3c1f3b..7e5e745 100644
--- a/runtime/fault_handler.cc
+++ b/runtime/fault_handler.cc
@@ -223,7 +223,7 @@
return false;
}
-bool FaultManager::HandleSigbusFault(int sig, siginfo_t* info, void* context ATTRIBUTE_UNUSED) {
+bool FaultManager::HandleSigbusFault(int sig, siginfo_t* info, [[maybe_unused]] void* context) {
DCHECK_EQ(sig, SIGBUS);
if (VLOG_IS_ON(signals)) {
PrintSignalInfo(VLOG_STREAM(signals) << "Handling SIGBUS fault:\n", info);
@@ -578,7 +578,7 @@
manager_->AddHandler(this, false);
}
-bool JavaStackTraceHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* siginfo, void* context) {
+bool JavaStackTraceHandler::Action([[maybe_unused]] int sig, siginfo_t* siginfo, void* context) {
// Make sure that we are in the generated code, but we may not have a dex pc.
bool in_generated_code = manager_->IsInGeneratedCode(siginfo, context);
if (in_generated_code) {
diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc
index 4a84799..85f7164 100644
--- a/runtime/gc/accounting/mod_union_table.cc
+++ b/runtime/gc/accounting/mod_union_table.cc
@@ -43,7 +43,7 @@
inline void operator()(uint8_t* card,
uint8_t expected_value,
- uint8_t new_value ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] uint8_t new_value) const {
if (expected_value == CardTable::kCardDirty) {
cleared_cards_->insert(card);
}
@@ -60,7 +60,7 @@
inline void operator()(uint8_t* card,
uint8_t expected_value,
- uint8_t new_value ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] uint8_t new_value) const {
if (expected_value == CardTable::kCardDirty) {
// We want the address the card represents, not the address of the card.
bitmap_->Set(reinterpret_cast<uintptr_t>(card_table_->AddrFromCard(card)));
@@ -78,7 +78,7 @@
: cleared_cards_(cleared_cards) {
}
- void operator()(uint8_t* card, uint8_t expected_card, uint8_t new_card ATTRIBUTE_UNUSED) const {
+ void operator()(uint8_t* card, uint8_t expected_card, [[maybe_unused]] uint8_t new_card) const {
if (expected_card == CardTable::kCardDirty) {
cleared_cards_->push_back(card);
}
@@ -100,7 +100,7 @@
contains_reference_to_other_space_(contains_reference_to_other_space) {}
// Extra parameters are required since we use this same visitor signature for checking objects.
- void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) {
MarkReference(obj->GetFieldObjectReferenceAddr(offset));
}
@@ -195,7 +195,7 @@
has_target_reference_(has_target_reference) {}
// Extra parameters are required since we use this same visitor signature for checking objects.
- void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) {
mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
mirror::Object* ref = ref_ptr->AsMirrorPtr();
@@ -270,7 +270,7 @@
references_(references) {}
// Extra parameters are required since we use this same visitor signature for checking objects.
- void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset);
if (ref != nullptr &&
diff --git a/runtime/gc/accounting/mod_union_table_test.cc b/runtime/gc/accounting/mod_union_table_test.cc
index 3f38f50..f1f10d2 100644
--- a/runtime/gc/accounting/mod_union_table_test.cc
+++ b/runtime/gc/accounting/mod_union_table_test.cc
@@ -100,7 +100,7 @@
public:
explicit CollectVisitedVisitor(std::set<mirror::Object*>* out) : out_(out) {}
void MarkHeapReference(mirror::HeapReference<mirror::Object>* ref,
- bool do_atomic_update ATTRIBUTE_UNUSED) override
+ [[maybe_unused]] bool do_atomic_update) override
REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(ref != nullptr);
MarkObject(ref->AsMirrorPtr());
diff --git a/runtime/gc/accounting/remembered_set.cc b/runtime/gc/accounting/remembered_set.cc
index fba62c3..e4ee305 100644
--- a/runtime/gc/accounting/remembered_set.cc
+++ b/runtime/gc/accounting/remembered_set.cc
@@ -42,7 +42,7 @@
explicit RememberedSetCardVisitor(RememberedSet::CardSet* const dirty_cards)
: dirty_cards_(dirty_cards) {}
- void operator()(uint8_t* card, uint8_t expected_value, uint8_t new_value ATTRIBUTE_UNUSED) const {
+ void operator()(uint8_t* card, uint8_t expected_value, [[maybe_unused]] uint8_t new_value) const {
if (expected_value == CardTable::kCardDirty) {
dirty_cards_->insert(card);
}
@@ -69,8 +69,7 @@
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(obj != nullptr);
mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
if (target_space_->HasAddress(ref_ptr->AsMirrorPtr())) {
diff --git a/runtime/gc/accounting/space_bitmap_test.cc b/runtime/gc/accounting/space_bitmap_test.cc
index 8fcf102..72fcef0 100644
--- a/runtime/gc/accounting/space_bitmap_test.cc
+++ b/runtime/gc/accounting/space_bitmap_test.cc
@@ -134,9 +134,7 @@
public:
explicit SimpleCounter(size_t* counter) : count_(counter) {}
- void operator()(mirror::Object* obj ATTRIBUTE_UNUSED) const {
- (*count_)++;
- }
+ void operator()([[maybe_unused]] mirror::Object* obj) const { (*count_)++; }
size_t* const count_;
};
@@ -203,9 +201,7 @@
uintptr_t range_end,
size_t manual_count) {
size_t count = 0;
- auto count_fn = [&count](mirror::Object* obj ATTRIBUTE_UNUSED) {
- count++;
- };
+ auto count_fn = [&count]([[maybe_unused]] mirror::Object* obj) { count++; };
space_bitmap->VisitMarkedRange(range_begin, range_end, count_fn);
EXPECT_EQ(count, manual_count);
};
diff --git a/runtime/gc/allocation_listener.h b/runtime/gc/allocation_listener.h
index 376b524..f286c6c 100644
--- a/runtime/gc/allocation_listener.h
+++ b/runtime/gc/allocation_listener.h
@@ -54,9 +54,9 @@
// PreObjectAlloc and the newly allocated object being visible to heap-walks.
//
// This can also be used to make any last-minute changes to the type or size of the allocation.
- virtual void PreObjectAllocated(Thread* self ATTRIBUTE_UNUSED,
- MutableHandle<mirror::Class> type ATTRIBUTE_UNUSED,
- size_t* byte_count ATTRIBUTE_UNUSED)
+ virtual void PreObjectAllocated([[maybe_unused]] Thread* self,
+ [[maybe_unused]] MutableHandle<mirror::Class> type,
+ [[maybe_unused]] size_t* byte_count)
REQUIRES(!Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_) {}
// Fast check if we want to get the PreObjectAllocated callback, to avoid the expense of creating
// handles. Defaults to false.
diff --git a/runtime/gc/allocator/art-dlmalloc.cc b/runtime/gc/allocator/art-dlmalloc.cc
index de0c85a..6296acd 100644
--- a/runtime/gc/allocator/art-dlmalloc.cc
+++ b/runtime/gc/allocator/art-dlmalloc.cc
@@ -83,8 +83,8 @@
}
}
-extern "C" void DlmallocBytesAllocatedCallback(void* start ATTRIBUTE_UNUSED,
- void* end ATTRIBUTE_UNUSED,
+extern "C" void DlmallocBytesAllocatedCallback([[maybe_unused]] void* start,
+ [[maybe_unused]] void* end,
size_t used_bytes,
void* arg) {
if (used_bytes == 0) {
@@ -94,8 +94,8 @@
*bytes_allocated += used_bytes + sizeof(size_t);
}
-extern "C" void DlmallocObjectsAllocatedCallback(void* start ATTRIBUTE_UNUSED,
- void* end ATTRIBUTE_UNUSED,
+extern "C" void DlmallocObjectsAllocatedCallback([[maybe_unused]] void* start,
+ [[maybe_unused]] void* end,
size_t used_bytes,
void* arg) {
if (used_bytes == 0) {
diff --git a/runtime/gc/allocator/rosalloc.cc b/runtime/gc/allocator/rosalloc.cc
index 320440d..656e29d 100644
--- a/runtime/gc/allocator/rosalloc.cc
+++ b/runtime/gc/allocator/rosalloc.cc
@@ -1720,8 +1720,10 @@
DCHECK_EQ(kMaxRegularBracketSize, bracketSizes[kNumRegularSizeBrackets - 1]);
}
-void RosAlloc::BytesAllocatedCallback(void* start ATTRIBUTE_UNUSED, void* end ATTRIBUTE_UNUSED,
- size_t used_bytes, void* arg) {
+void RosAlloc::BytesAllocatedCallback([[maybe_unused]] void* start,
+ [[maybe_unused]] void* end,
+ size_t used_bytes,
+ void* arg) {
if (used_bytes == 0) {
return;
}
@@ -1729,8 +1731,10 @@
*bytes_allocated += used_bytes;
}
-void RosAlloc::ObjectsAllocatedCallback(void* start ATTRIBUTE_UNUSED, void* end ATTRIBUTE_UNUSED,
- size_t used_bytes, void* arg) {
+void RosAlloc::ObjectsAllocatedCallback([[maybe_unused]] void* start,
+ [[maybe_unused]] void* end,
+ size_t used_bytes,
+ void* arg) {
if (used_bytes == 0) {
return;
}
diff --git a/runtime/gc/allocator/rosalloc.h b/runtime/gc/allocator/rosalloc.h
index 9a09c88..bb2f426 100644
--- a/runtime/gc/allocator/rosalloc.h
+++ b/runtime/gc/allocator/rosalloc.h
@@ -303,7 +303,7 @@
// The number of slots in the list. This is used to make it fast to check if a free list is all
// free without traversing the whole free list.
uint32_t size_;
- uint32_t padding_ ATTRIBUTE_UNUSED;
+ [[maybe_unused]] uint32_t padding_;
friend class RosAlloc;
};
@@ -354,7 +354,7 @@
uint8_t is_thread_local_; // True if this run is used as a thread-local run.
bool to_be_bulk_freed_; // Used within BulkFree() to flag a run that's involved with
// a bulk free.
- uint32_t padding_ ATTRIBUTE_UNUSED;
+ [[maybe_unused]] uint32_t padding_;
// Use a tailless free list for free_list_ so that the alloc fast path does not manage the tail.
SlotFreeList<false> free_list_;
SlotFreeList<true> bulk_free_list_;
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 1f123aa..3e95871 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -300,7 +300,7 @@
explicit ActivateReadBarrierEntrypointsCallback(ConcurrentCopying* concurrent_copying)
: concurrent_copying_(concurrent_copying) {}
- void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES(Locks::thread_list_lock_) {
+ void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::thread_list_lock_) {
// This needs to run under the thread_list_lock_ critical section in ThreadList::RunCheckpoint()
// to avoid a race with ThreadList::Register().
CHECK(!concurrent_copying_->is_using_read_barrier_entrypoints_);
@@ -509,7 +509,7 @@
void VisitRoots(mirror::Object*** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) override
+ [[maybe_unused]] const RootInfo& info) override
REQUIRES_SHARED(Locks::mutator_lock_) {
Thread* self = Thread::Current();
for (size_t i = 0; i < count; ++i) {
@@ -526,7 +526,7 @@
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) override
+ [[maybe_unused]] const RootInfo& info) override
REQUIRES_SHARED(Locks::mutator_lock_) {
Thread* self = Thread::Current();
for (size_t i = 0; i < count; ++i) {
@@ -700,7 +700,7 @@
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
if (offset.Uint32Value() != mirror::Object::ClassOffset().Uint32Value()) {
CheckReference(obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(
@@ -952,7 +952,7 @@
void VisitRoots(mirror::Object*** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) override
+ [[maybe_unused]] const RootInfo& info) override
REQUIRES_SHARED(Locks::mutator_lock_) {
for (size_t i = 0; i < count; ++i) {
mirror::Object** root = roots[i];
@@ -965,7 +965,7 @@
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) override
+ [[maybe_unused]] const RootInfo& info) override
REQUIRES_SHARED(Locks::mutator_lock_) {
for (size_t i = 0; i < count; ++i) {
mirror::CompressedReference<mirror::Object>* const root = roots[i];
@@ -1770,7 +1770,7 @@
: concurrent_copying_(concurrent_copying) {
}
- void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES(Locks::thread_list_lock_) {
+ void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::thread_list_lock_) {
// This needs to run under the thread_list_lock_ critical section in ThreadList::RunCheckpoint()
// to avoid a race with ThreadList::Register().
CHECK(concurrent_copying_->is_marking_);
@@ -1941,8 +1941,8 @@
}
}
- void VisitRoot(mirror::Object* root, const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void VisitRoot(mirror::Object* root, [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(root != nullptr);
operator()(root);
}
@@ -1958,7 +1958,7 @@
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
mirror::Object* ref =
obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
@@ -2053,13 +2053,13 @@
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
mirror::Object* ref =
obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
collector_->AssertToSpaceInvariant(obj.Ptr(), offset, ref);
}
- void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<mirror::Class> klass, [[maybe_unused]] ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE {
CHECK(klass->IsTypeOfReferenceClass());
}
@@ -2417,7 +2417,7 @@
: concurrent_copying_(concurrent_copying) {
}
- void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES(Locks::thread_list_lock_) {
+ void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::thread_list_lock_) {
// This needs to run under the thread_list_lock_ critical section in ThreadList::RunCheckpoint()
// to avoid a deadlock b/31500969.
CHECK(concurrent_copying_->weak_ref_access_enabled_);
@@ -3266,8 +3266,9 @@
}
// Process some roots.
-inline void ConcurrentCopying::VisitRoots(
- mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) {
+inline void ConcurrentCopying::VisitRoots(mirror::Object*** roots,
+ size_t count,
+ [[maybe_unused]] const RootInfo& info) {
Thread* const self = Thread::Current();
for (size_t i = 0; i < count; ++i) {
mirror::Object** root = roots[i];
@@ -3308,9 +3309,9 @@
}
}
-inline void ConcurrentCopying::VisitRoots(
- mirror::CompressedReference<mirror::Object>** roots, size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) {
+inline void ConcurrentCopying::VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
+ size_t count,
+ [[maybe_unused]] const RootInfo& info) {
Thread* const self = Thread::Current();
for (size_t i = 0; i < count; ++i) {
mirror::CompressedReference<mirror::Object>* const root = roots[i];
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index f9a4e72..ea303ec 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -625,7 +625,7 @@
public:
explicit FlipCallback(MarkCompact* collector) : collector_(collector) {}
- void Run(Thread* thread ATTRIBUTE_UNUSED) override REQUIRES(Locks::mutator_lock_) {
+ void Run([[maybe_unused]] Thread* thread) override REQUIRES(Locks::mutator_lock_) {
collector_->CompactionPause();
}
@@ -851,7 +851,7 @@
explicit ConcurrentCompactionGcTask(MarkCompact* collector, size_t idx)
: collector_(collector), index_(idx) {}
- void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void Run([[maybe_unused]] Thread* self) override REQUIRES_SHARED(Locks::mutator_lock_) {
if (collector_->CanCompactMovingSpaceWithMinorFault()) {
collector_->ConcurrentCompaction<MarkCompact::kMinorFaultMode>(/*buf=*/nullptr);
} else {
@@ -1331,9 +1331,10 @@
DCHECK(!kCheckEnd || end != nullptr);
}
- void operator()(mirror::Object* old ATTRIBUTE_UNUSED, MemberOffset offset, bool /* is_static */)
- const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
+ void operator()([[maybe_unused]] mirror::Object* old,
+ MemberOffset offset,
+ [[maybe_unused]] bool is_static) const ALWAYS_INLINE
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
bool update = true;
if (kCheckBegin || kCheckEnd) {
uint8_t* ref = reinterpret_cast<uint8_t*>(obj_) + offset.Int32Value();
@@ -1348,12 +1349,11 @@
// VisitReferenes().
// TODO: Optimize reference updating using SIMD instructions. Object arrays
// are perfect as all references are tightly packed.
- void operator()(mirror::Object* old ATTRIBUTE_UNUSED,
+ void operator()([[maybe_unused]] mirror::Object* old,
MemberOffset offset,
- bool /*is_static*/,
- bool /*is_obj_array*/)
- const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
+ [[maybe_unused]] bool is_static,
+ [[maybe_unused]] bool is_obj_array) const ALWAYS_INLINE
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
collector_->UpdateRef(obj_, offset);
}
@@ -1455,51 +1455,38 @@
<< " start_addr=" << static_cast<void*>(start_addr);
};
obj = GetFromSpaceAddr(obj);
- live_words_bitmap_->VisitLiveStrides(offset,
- black_allocations_begin_,
- kPageSize,
- [&addr,
- &last_stride,
- &stride_count,
- &last_stride_begin,
- verify_obj_callback,
- this] (uint32_t stride_begin,
- size_t stride_size,
- bool /*is_last*/)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- const size_t stride_in_bytes = stride_size * kAlignment;
- DCHECK_LE(stride_in_bytes, kPageSize);
- last_stride_begin = stride_begin;
- DCHECK(IsAligned<kAlignment>(addr));
- memcpy(addr,
- from_space_begin_ + stride_begin * kAlignment,
- stride_in_bytes);
- if (kIsDebugBuild) {
- uint8_t* space_begin = bump_pointer_space_->Begin();
- // We can interpret the first word of the stride as an
- // obj only from second stride onwards, as the first
- // stride's first-object may have started on previous
- // page. The only exception is the first page of the
- // moving space.
- if (stride_count > 0
- || stride_begin * kAlignment < kPageSize) {
- mirror::Object* o =
- reinterpret_cast<mirror::Object*>(space_begin
- + stride_begin
- * kAlignment);
- CHECK(live_words_bitmap_->Test(o)) << "ref=" << o;
- CHECK(moving_space_bitmap_->Test(o))
- << "ref=" << o
- << " bitmap: "
- << moving_space_bitmap_->DumpMemAround(o);
- VerifyObject(reinterpret_cast<mirror::Object*>(addr),
- verify_obj_callback);
- }
- }
- last_stride = addr;
- addr += stride_in_bytes;
- stride_count++;
- });
+ live_words_bitmap_->VisitLiveStrides(
+ offset,
+ black_allocations_begin_,
+ kPageSize,
+ [&addr, &last_stride, &stride_count, &last_stride_begin, verify_obj_callback, this](
+ uint32_t stride_begin, size_t stride_size, [[maybe_unused]] bool is_last)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ const size_t stride_in_bytes = stride_size * kAlignment;
+ DCHECK_LE(stride_in_bytes, kPageSize);
+ last_stride_begin = stride_begin;
+ DCHECK(IsAligned<kAlignment>(addr));
+ memcpy(addr, from_space_begin_ + stride_begin * kAlignment, stride_in_bytes);
+ if (kIsDebugBuild) {
+ uint8_t* space_begin = bump_pointer_space_->Begin();
+ // We can interpret the first word of the stride as an
+ // obj only from second stride onwards, as the first
+ // stride's first-object may have started on previous
+ // page. The only exception is the first page of the
+ // moving space.
+ if (stride_count > 0 || stride_begin * kAlignment < kPageSize) {
+ mirror::Object* o =
+ reinterpret_cast<mirror::Object*>(space_begin + stride_begin * kAlignment);
+ CHECK(live_words_bitmap_->Test(o)) << "ref=" << o;
+ CHECK(moving_space_bitmap_->Test(o))
+ << "ref=" << o << " bitmap: " << moving_space_bitmap_->DumpMemAround(o);
+ VerifyObject(reinterpret_cast<mirror::Object*>(addr), verify_obj_callback);
+ }
+ }
+ last_stride = addr;
+ addr += stride_in_bytes;
+ stride_count++;
+ });
DCHECK_LT(last_stride, start_addr + kPageSize);
DCHECK_GT(stride_count, 0u);
size_t obj_size = 0;
@@ -3580,9 +3567,10 @@
Flush();
}
- void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(Locks::heap_bitmap_lock_) {
+ void VisitRoots(mirror::Object*** roots,
+ size_t count,
+ [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
for (size_t i = 0; i < count; i++) {
mirror::Object* obj = *roots[i];
if (mark_compact_->MarkObjectNonNullNoPush</*kParallel*/true>(obj)) {
@@ -3593,9 +3581,8 @@
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(Locks::heap_bitmap_lock_) {
+ [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
for (size_t i = 0; i < count; i++) {
mirror::Object* obj = roots[i]->AsMirrorPtr();
if (mark_compact_->MarkObjectNonNullNoPush</*kParallel*/true>(obj)) {
@@ -3762,9 +3749,7 @@
accounting::CardTable* const card_table)
: visitor_(mark_compact), bitmap_(bitmap), card_table_(card_table) {}
- void operator()(uint8_t* card,
- uint8_t expected_value,
- uint8_t new_value ATTRIBUTE_UNUSED) const {
+ void operator()(uint8_t* card, uint8_t expected_value, [[maybe_unused]] uint8_t new_value) const {
if (expected_value == accounting::CardTable::kCardDirty) {
uintptr_t start = reinterpret_cast<uintptr_t>(card_table_->AddrFromCard(card));
bitmap_->VisitMarkedRange(start, start + accounting::CardTable::kCardSize, visitor_);
@@ -3917,9 +3902,8 @@
ALWAYS_INLINE void operator()(mirror::Object* obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES(Locks::heap_bitmap_lock_)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const
+ REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_) {
if (kCheckLocks) {
Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
@@ -4096,7 +4080,7 @@
}
void MarkCompact::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj,
- bool do_atomic_update ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool do_atomic_update) {
MarkObject(obj->AsMirrorPtr(), nullptr, MemberOffset(0));
}
@@ -4166,7 +4150,7 @@
}
bool MarkCompact::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj,
- bool do_atomic_update ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool do_atomic_update) {
mirror::Object* ref = obj->AsMirrorPtr();
if (ref == nullptr) {
return true;
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 4fefe65..5209354 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -416,7 +416,7 @@
}
bool MarkSweep::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref,
- bool do_atomic_update ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool do_atomic_update) {
mirror::Object* obj = ref->AsMirrorPtr();
if (obj == nullptr) {
return true;
@@ -558,7 +558,7 @@
}
void MarkSweep::MarkHeapReference(mirror::HeapReference<mirror::Object>* ref,
- bool do_atomic_update ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool do_atomic_update) {
MarkObject(ref->AsMirrorPtr(), nullptr, MemberOffset(0));
}
@@ -588,7 +588,7 @@
void MarkSweep::VisitRoots(mirror::Object*** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const RootInfo& info) {
for (size_t i = 0; i < count; ++i) {
MarkObjectNonNull(*roots[i]);
}
@@ -596,7 +596,7 @@
void MarkSweep::VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const RootInfo& info) {
for (size_t i = 0; i < count; ++i) {
MarkObjectNonNull(roots[i]->AsMirrorPtr());
}
@@ -698,8 +698,8 @@
: chunk_task_(chunk_task), mark_sweep_(mark_sweep) {}
ALWAYS_INLINE void operator()(mirror::Object* obj,
- MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
+ MemberOffset offset,
+ [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) {
Mark(obj->GetFieldObject<mirror::Object>(offset));
}
@@ -793,8 +793,7 @@
}
// Scans all of the objects
- void Run(Thread* self ATTRIBUTE_UNUSED) override
- REQUIRES(Locks::heap_bitmap_lock_)
+ void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::heap_bitmap_lock_)
REQUIRES_SHARED(Locks::mutator_lock_) {
ScanObjectParallelVisitor visitor(this);
// TODO: Tune this.
@@ -1142,9 +1141,10 @@
revoke_ros_alloc_thread_local_buffers_at_checkpoint) {
}
- void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(Locks::heap_bitmap_lock_) {
+ void VisitRoots(mirror::Object*** roots,
+ size_t count,
+ [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
for (size_t i = 0; i < count; ++i) {
mark_sweep_->MarkObjectNonNullParallel(*roots[i]);
}
@@ -1152,9 +1152,8 @@
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(Locks::heap_bitmap_lock_) {
+ [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
for (size_t i = 0; i < count; ++i) {
mark_sweep_->MarkObjectNonNullParallel(roots[i]->AsMirrorPtr());
}
@@ -1352,9 +1351,8 @@
ALWAYS_INLINE void operator()(mirror::Object* obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES(Locks::heap_bitmap_lock_)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const
+ REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_) {
if (kCheckLocks) {
Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index acd4807..a7e2b59 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -467,12 +467,13 @@
}
void SemiSpace::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr,
- bool do_atomic_update ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool do_atomic_update) {
MarkObject(obj_ptr);
}
-void SemiSpace::VisitRoots(mirror::Object*** roots, size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) {
+void SemiSpace::VisitRoots(mirror::Object*** roots,
+ size_t count,
+ [[maybe_unused]] const RootInfo& info) {
for (size_t i = 0; i < count; ++i) {
auto* root = roots[i];
auto ref = StackReference<mirror::Object>::FromMirrorPtr(*root);
@@ -485,8 +486,9 @@
}
}
-void SemiSpace::VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
- const RootInfo& info ATTRIBUTE_UNUSED) {
+void SemiSpace::VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
+ size_t count,
+ [[maybe_unused]] const RootInfo& info) {
for (size_t i = 0; i < count; ++i) {
MarkObjectIfNotInToSpace(roots[i]);
}
@@ -610,7 +612,7 @@
bool SemiSpace::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* object,
// SemiSpace does the GC in a pause. No CAS needed.
- bool do_atomic_update ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] bool do_atomic_update) {
mirror::Object* obj = object->AsMirrorPtr();
if (obj == nullptr) {
return true;
diff --git a/runtime/gc/collector/sticky_mark_sweep.cc b/runtime/gc/collector/sticky_mark_sweep.cc
index d93bd89..e1bd16c 100644
--- a/runtime/gc/collector/sticky_mark_sweep.cc
+++ b/runtime/gc/collector/sticky_mark_sweep.cc
@@ -73,7 +73,7 @@
static_cast<VisitRootFlags>(flags | kVisitRootFlagClassLoader));
}
-void StickyMarkSweep::Sweep(bool swap_bitmaps ATTRIBUTE_UNUSED) {
+void StickyMarkSweep::Sweep([[maybe_unused]] bool swap_bitmaps) {
SweepArray(GetHeap()->GetLiveStack(), false);
}
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 922b588..0f1a44f 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -441,7 +441,7 @@
return byte_count >= large_object_threshold_ && (c->IsPrimitiveArray() || c->IsStringClass());
}
-inline bool Heap::IsOutOfMemoryOnAllocation(AllocatorType allocator_type ATTRIBUTE_UNUSED,
+inline bool Heap::IsOutOfMemoryOnAllocation([[maybe_unused]] AllocatorType allocator_type,
size_t alloc_size,
bool grow) {
size_t old_target = target_footprint_.load(std::memory_order_relaxed);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index f27bddb..381271f 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -132,7 +132,7 @@
// Disable the heap sampler Callback function used by Perfetto.
void DisableHeapSamplerCallback(void* disable_ptr,
- const AHeapProfileDisableCallbackInfo* info_ptr ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const AHeapProfileDisableCallbackInfo* info_ptr) {
HeapSampler* sampler_self = reinterpret_cast<HeapSampler*>(disable_ptr);
sampler_self->DisableHeapSampler();
}
@@ -2342,7 +2342,7 @@
}
}
- bool ShouldSweepSpace(space::ContinuousSpace* space ATTRIBUTE_UNUSED) const override {
+ bool ShouldSweepSpace([[maybe_unused]] space::ContinuousSpace* space) const override {
// Don't sweep any spaces since we probably blasted the internal accounting of the free list
// allocator.
return false;
@@ -2986,7 +2986,7 @@
CHECK_EQ(self_, Thread::Current());
}
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, ObjPtr<mirror::Reference> ref) const
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
REQUIRES_SHARED(Locks::mutator_lock_) {
if (verify_referent_) {
VerifyReference(ref.Ptr(), ref->GetReferent(), mirror::Reference::ReferentOffset());
@@ -2995,8 +2995,7 @@
void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
VerifyReference(obj.Ptr(), obj->GetFieldObject<mirror::Object>(offset), offset);
}
@@ -3251,9 +3250,9 @@
}
// There is no card marks for native roots on a class.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
// TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for
// annotalysis on visitors.
@@ -3502,7 +3501,7 @@
}
}
-void Heap::PrePauseRosAllocVerification(collector::GarbageCollector* gc ATTRIBUTE_UNUSED) {
+void Heap::PrePauseRosAllocVerification([[maybe_unused]] collector::GarbageCollector* gc) {
// TODO: Add a new runtime option for this?
if (verify_pre_gc_rosalloc_) {
RosAllocVerification(current_gc_iteration_.GetTimings(), "PreGcRosAllocVerification");
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 56efcab..f4af50f 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -332,7 +332,7 @@
}
template <typename T>
- T* operator()(T* ptr, void** dest_addr ATTRIBUTE_UNUSED) const {
+ T* operator()(T* ptr, [[maybe_unused]] void** dest_addr) const {
return (ptr != nullptr) ? native_visitor_(ptr) : nullptr;
}
@@ -373,9 +373,9 @@
this->operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
}
// Ignore class native roots; not called from VisitReferences() for kVisitNativeRoots == false.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
template <typename T> void VisitNativeDexCacheArray(mirror::NativeArray<T>* array)
REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -516,8 +516,8 @@
// Visitor for VisitReferences().
ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> object,
MemberOffset field_offset,
- bool is_static ATTRIBUTE_UNUSED)
- const REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] bool is_static) const
+ REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::Object> old_value =
object->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(field_offset);
if (old_value != nullptr &&
@@ -538,9 +538,9 @@
this->operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
}
// Ignore class native roots; not called from VisitReferences() for kVisitNativeRoots == false.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
private:
mirror::Class* GetStringClass() REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1179,15 +1179,14 @@
// Fix up separately since we also need to fix up method entrypoints.
ALWAYS_INLINE void VisitRootIfNonNull(
- mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
- ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {}
+ ALWAYS_INLINE void VisitRoot(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj,
MemberOffset offset,
- bool is_static ATTRIBUTE_UNUSED) const
- NO_THREAD_SAFETY_ANALYSIS {
+ [[maybe_unused]] bool is_static) const NO_THREAD_SAFETY_ANALYSIS {
// Space is not yet added to the heap, don't do a read barrier.
mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(
offset);
@@ -1898,7 +1897,7 @@
// TODO: Rewrite ProfileCompilationInfo to provide a better interface and
// to store the dex locations in uncompressed section of the file.
auto collect_fn = [&dex_locations](const std::string& dex_location,
- uint32_t checksum ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] uint32_t checksum) {
dex_locations.insert(dex_location); // Just collect locations.
return false; // Do not read the profile data.
};
@@ -2188,8 +2187,8 @@
bool allow_in_memory_compilation,
/*out*/ std::string* error_msg) {
auto filename_fn = [image_isa](const std::string& location,
- /*out*/std::string* filename,
- /*out*/std::string* err_msg ATTRIBUTE_UNUSED) {
+ /*out*/ std::string* filename,
+ [[maybe_unused]] /*out*/ std::string* err_msg) {
*filename = GetSystemImageFilename(location.c_str(), image_isa);
return true;
};
diff --git a/runtime/gc/space/large_object_space.cc b/runtime/gc/space/large_object_space.cc
index f1df45f..80ed9b3 100644
--- a/runtime/gc/space/large_object_space.cc
+++ b/runtime/gc/space/large_object_space.cc
@@ -585,7 +585,7 @@
}
}
-bool FreeListSpace::IsZygoteLargeObject(Thread* self ATTRIBUTE_UNUSED, mirror::Object* obj) const {
+bool FreeListSpace::IsZygoteLargeObject([[maybe_unused]] Thread* self, mirror::Object* obj) const {
const AllocationInfo* info = GetAllocationInfoForAddress(reinterpret_cast<uintptr_t>(obj));
DCHECK(info != nullptr);
return info->IsZygoteObject();
diff --git a/runtime/gc/space/memory_tool_malloc_space.h b/runtime/gc/space/memory_tool_malloc_space.h
index 33bddfa..ce72b5b 100644
--- a/runtime/gc/space/memory_tool_malloc_space.h
+++ b/runtime/gc/space/memory_tool_malloc_space.h
@@ -48,7 +48,7 @@
size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) override
REQUIRES_SHARED(Locks::mutator_lock_);
- void RegisterRecentFree(mirror::Object* ptr ATTRIBUTE_UNUSED) override {}
+ void RegisterRecentFree([[maybe_unused]] mirror::Object* ptr) override {}
size_t MaxBytesBulkAllocatedFor(size_t num_bytes) override;
diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h
index 1026f42..4376137 100644
--- a/runtime/gc/space/region_space-inl.h
+++ b/runtime/gc/space/region_space-inl.h
@@ -26,7 +26,7 @@
namespace gc {
namespace space {
-inline mirror::Object* RegionSpace::Alloc(Thread* self ATTRIBUTE_UNUSED,
+inline mirror::Object* RegionSpace::Alloc([[maybe_unused]] Thread* self,
size_t num_bytes,
/* out */ size_t* bytes_allocated,
/* out */ size_t* usable_size,
diff --git a/runtime/gc/space/zygote_space.cc b/runtime/gc/space/zygote_space.cc
index c5e3a70..f40061f 100644
--- a/runtime/gc/space/zygote_space.cc
+++ b/runtime/gc/space/zygote_space.cc
@@ -34,9 +34,7 @@
explicit CountObjectsAllocated(size_t* objects_allocated)
: objects_allocated_(objects_allocated) {}
- void operator()(mirror::Object* obj ATTRIBUTE_UNUSED) const {
- ++*objects_allocated_;
- }
+ void operator()([[maybe_unused]] mirror::Object* obj) const { ++*objects_allocated_; }
private:
size_t* const objects_allocated_;
diff --git a/runtime/gc/system_weak.h b/runtime/gc/system_weak.h
index 77b9548..57d593c 100644
--- a/runtime/gc/system_weak.h
+++ b/runtime/gc/system_weak.h
@@ -62,7 +62,7 @@
allow_new_system_weak_ = false;
}
- void Broadcast(bool broadcast_for_checkpoint ATTRIBUTE_UNUSED) override
+ void Broadcast([[maybe_unused]] bool broadcast_for_checkpoint) override
REQUIRES(!allow_disallow_lock_) {
MutexLock mu(Thread::Current(), allow_disallow_lock_);
new_weak_condition_.Broadcast(Thread::Current());
diff --git a/runtime/gc/task_processor_test.cc b/runtime/gc/task_processor_test.cc
index 7cb678b..a3666e0 100644
--- a/runtime/gc/task_processor_test.cc
+++ b/runtime/gc/task_processor_test.cc
@@ -105,7 +105,7 @@
TestOrderTask(uint64_t expected_time, size_t expected_counter, size_t* counter)
: HeapTask(expected_time), expected_counter_(expected_counter), counter_(counter) {
}
- void Run(Thread* thread ATTRIBUTE_UNUSED) override {
+ void Run([[maybe_unused]] Thread* thread) override {
ASSERT_EQ(*counter_, expected_counter_);
++*counter_;
}
diff --git a/runtime/gc/verification.cc b/runtime/gc/verification.cc
index 195986f..ad04860 100644
--- a/runtime/gc/verification.cc
+++ b/runtime/gc/verification.cc
@@ -133,7 +133,7 @@
public:
explicit BFSFindReachable(ObjectSet* visited) : visited_(visited) {}
- void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
+ void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const
REQUIRES_SHARED(Locks::mutator_lock_) {
ArtField* field = obj->FindFieldByOffset(offset);
Visit(obj->GetFieldObject<mirror::Object>(offset),
diff --git a/runtime/handle_scope.h b/runtime/handle_scope.h
index a43e889..0c50312 100644
--- a/runtime/handle_scope.h
+++ b/runtime/handle_scope.h
@@ -122,9 +122,7 @@
ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;
// Offset of link within HandleScope, used by generated code.
- static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
- return 0;
- }
+ static constexpr size_t LinkOffset([[maybe_unused]] PointerSize pointer_size) { return 0; }
// Offset of length within handle scope, used by generated code.
static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
diff --git a/runtime/hprof/hprof.cc b/runtime/hprof/hprof.cc
index 5e4a5f3..ea64cc1 100644
--- a/runtime/hprof/hprof.cc
+++ b/runtime/hprof/hprof.cc
@@ -237,7 +237,7 @@
HandleU4List(values, count);
length_ += count * sizeof(uint32_t);
}
- virtual void UpdateU4(size_t offset, uint32_t new_value ATTRIBUTE_UNUSED) {
+ virtual void UpdateU4(size_t offset, [[maybe_unused]] uint32_t new_value) {
DCHECK_LE(offset, length_ - 4);
}
void AddU8List(const uint64_t* values, size_t count) {
@@ -271,21 +271,16 @@
}
protected:
- virtual void HandleU1List(const uint8_t* values ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED) {
+ virtual void HandleU1List([[maybe_unused]] const uint8_t* values, [[maybe_unused]] size_t count) {
}
- virtual void HandleU1AsU2List(const uint8_t* values ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED) {
- }
- virtual void HandleU2List(const uint16_t* values ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED) {
- }
- virtual void HandleU4List(const uint32_t* values ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED) {
- }
- virtual void HandleU8List(const uint64_t* values ATTRIBUTE_UNUSED,
- size_t count ATTRIBUTE_UNUSED) {
- }
+ virtual void HandleU1AsU2List([[maybe_unused]] const uint8_t* values,
+ [[maybe_unused]] size_t count) {}
+ virtual void HandleU2List([[maybe_unused]] const uint16_t* values,
+ [[maybe_unused]] size_t count) {}
+ virtual void HandleU4List([[maybe_unused]] const uint32_t* values,
+ [[maybe_unused]] size_t count) {}
+ virtual void HandleU8List([[maybe_unused]] const uint64_t* values,
+ [[maybe_unused]] size_t count) {}
virtual void HandleEndRecord() {
}
@@ -382,7 +377,7 @@
buffer_.clear();
}
- virtual void HandleFlush(const uint8_t* buffer ATTRIBUTE_UNUSED, size_t length ATTRIBUTE_UNUSED) {
+ virtual void HandleFlush([[maybe_unused]] const uint8_t* buffer, [[maybe_unused]] size_t length) {
}
std::vector<uint8_t> buffer_;
@@ -743,7 +738,7 @@
}
}
- bool DumpToDdmsBuffered(size_t overall_size ATTRIBUTE_UNUSED, size_t max_length ATTRIBUTE_UNUSED)
+ bool DumpToDdmsBuffered([[maybe_unused]] size_t overall_size, [[maybe_unused]] size_t max_length)
REQUIRES(Locks::mutator_lock_) {
LOG(FATAL) << "Unimplemented";
UNREACHABLE();
diff --git a/runtime/image.h b/runtime/image.h
index 324cd3c..5580e27 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -274,7 +274,7 @@
kSectionCount, // Number of elements in enum.
};
- static size_t NumberOfImageRoots(bool app_image ATTRIBUTE_UNUSED) {
+ static size_t NumberOfImageRoots([[maybe_unused]] bool app_image) {
// At the moment, boot image and app image have the same number of roots,
// though the meaning of the kSpecialRoots is different.
return kImageRootsMax;
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 5ce2b10..ba0d63d 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -1073,7 +1073,7 @@
}
}
-static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
+static void ResetQuickAllocEntryPointsForThread(Thread* thread, [[maybe_unused]] void* arg) {
thread->ResetQuickAllocEntryPointsForThread();
}
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 144ee09..7676080 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -149,8 +149,8 @@
// Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack by
// either return or exceptions. Normally instrumentation listeners should ensure that there are
// shadow-frames by deoptimizing stacks.
- virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED,
- const ShadowFrame& frame ATTRIBUTE_UNUSED)
+ virtual void WatchedFramePop([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] const ShadowFrame& frame)
REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
diff --git a/runtime/instrumentation_test.cc b/runtime/instrumentation_test.cc
index fc05298..1e98e57 100644
--- a/runtime/instrumentation_test.cc
+++ b/runtime/instrumentation_test.cc
@@ -55,93 +55,93 @@
virtual ~TestInstrumentationListener() {}
- void MethodEntered(Thread* thread ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED) override
+ void MethodEntered([[maybe_unused]] Thread* thread, [[maybe_unused]] ArtMethod* method) override
REQUIRES_SHARED(Locks::mutator_lock_) {
received_method_enter_event = true;
}
- void MethodExited(Thread* thread ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED,
- MutableHandle<mirror::Object>& return_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void MethodExited([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] instrumentation::OptionalFrame frame,
+ [[maybe_unused]] MutableHandle<mirror::Object>& return_value) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_method_exit_object_event = true;
}
- void MethodExited(Thread* thread ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED,
- JValue& return_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void MethodExited([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] instrumentation::OptionalFrame frame,
+ [[maybe_unused]] JValue& return_value) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_method_exit_event = true;
}
- void MethodUnwind(Thread* thread ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void MethodUnwind([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_method_unwind_event = true;
}
- void DexPcMoved(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t new_dex_pc ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void DexPcMoved([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] uint32_t new_dex_pc) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_dex_pc_moved_event = true;
}
- void FieldRead(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- ArtField* field ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void FieldRead([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] ArtField* field) override REQUIRES_SHARED(Locks::mutator_lock_) {
received_field_read_event = true;
}
- void FieldWritten(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- ArtField* field ATTRIBUTE_UNUSED,
- Handle<mirror::Object> field_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void FieldWritten([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] ArtField* field,
+ [[maybe_unused]] Handle<mirror::Object> field_value) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_field_written_object_event = true;
}
- void FieldWritten(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- ArtField* field ATTRIBUTE_UNUSED,
- const JValue& field_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void FieldWritten([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] ArtField* field,
+ [[maybe_unused]] const JValue& field_value) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_field_written_event = true;
}
- void ExceptionThrown(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Throwable> exception_object ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void ExceptionThrown([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Throwable> exception_object) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_exception_thrown_event = true;
}
- void ExceptionHandled(Thread* self ATTRIBUTE_UNUSED,
- Handle<mirror::Throwable> throwable ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void ExceptionHandled([[maybe_unused]] Thread* self,
+ [[maybe_unused]] Handle<mirror::Throwable> throwable) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_exception_handled_event = true;
}
- void Branch(Thread* thread ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- int32_t dex_pc_offset ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void Branch([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] int32_t dex_pc_offset) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_branch_event = true;
}
- void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED, const ShadowFrame& frame ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void WatchedFramePop([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] const ShadowFrame& frame) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
received_watched_frame_pop = true;
}
diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc
index 32ed430..8d3d2d6 100644
--- a/runtime/interpreter/unstarted_runtime.cc
+++ b/runtime/interpreter/unstarted_runtime.cc
@@ -813,8 +813,10 @@
}
}
-void UnstartedRuntime::UnstartedSystemArraycopy(
- Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) {
+void UnstartedRuntime::UnstartedSystemArraycopy(Thread* self,
+ ShadowFrame* shadow_frame,
+ [[maybe_unused]] JValue* result,
+ size_t arg_offset) {
// Special case array copying without initializing System.
jint src_pos = shadow_frame->GetVReg(arg_offset + 1);
jint dst_pos = shadow_frame->GetVReg(arg_offset + 3);
@@ -930,9 +932,10 @@
UnstartedRuntime::UnstartedSystemArraycopy(self, shadow_frame, result, arg_offset);
}
-void UnstartedRuntime::UnstartedSystemGetSecurityManager(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame ATTRIBUTE_UNUSED,
- JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedSystemGetSecurityManager([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ShadowFrame* shadow_frame,
+ JValue* result,
+ [[maybe_unused]] size_t arg_offset) {
result->SetL(nullptr);
}
@@ -1089,8 +1092,10 @@
return nullptr;
}
-void UnstartedRuntime::UnstartedThreadLocalGet(
- Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedThreadLocalGet(Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ [[maybe_unused]] size_t arg_offset) {
if (CheckCallers(shadow_frame, { "jdk.internal.math.FloatingDecimal$BinaryToASCIIBuffer "
"jdk.internal.math.FloatingDecimal.getBinaryToASCIIBuffer()" })) {
result->SetL(CreateInstanceOf(self, "Ljdk/internal/math/FloatingDecimal$BinaryToASCIIBuffer;"));
@@ -1101,8 +1106,10 @@
}
}
-void UnstartedRuntime::UnstartedThreadCurrentThread(
- Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedThreadCurrentThread(Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ [[maybe_unused]] size_t arg_offset) {
if (CheckCallers(shadow_frame,
{ "void java.lang.Thread.<init>(java.lang.ThreadGroup, java.lang.Runnable, "
"java.lang.String, long, java.security.AccessControlContext, boolean)",
@@ -1131,8 +1138,10 @@
}
}
-void UnstartedRuntime::UnstartedThreadGetNativeState(
- Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedThreadGetNativeState(Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ [[maybe_unused]] size_t arg_offset) {
if (CheckCallers(shadow_frame,
{ "java.lang.Thread$State java.lang.Thread.getState()",
"java.lang.ThreadGroup java.lang.Thread.getThreadGroup()",
@@ -1154,45 +1163,61 @@
}
}
-void UnstartedRuntime::UnstartedMathCeil(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMathCeil([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
result->SetD(ceil(shadow_frame->GetVRegDouble(arg_offset)));
}
-void UnstartedRuntime::UnstartedMathFloor(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMathFloor([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
result->SetD(floor(shadow_frame->GetVRegDouble(arg_offset)));
}
-void UnstartedRuntime::UnstartedMathSin(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMathSin([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
result->SetD(sin(shadow_frame->GetVRegDouble(arg_offset)));
}
-void UnstartedRuntime::UnstartedMathCos(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMathCos([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
result->SetD(cos(shadow_frame->GetVRegDouble(arg_offset)));
}
-void UnstartedRuntime::UnstartedMathPow(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMathPow([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
result->SetD(pow(shadow_frame->GetVRegDouble(arg_offset),
shadow_frame->GetVRegDouble(arg_offset + 2)));
}
-void UnstartedRuntime::UnstartedMathTan(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMathTan([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
result->SetD(tan(shadow_frame->GetVRegDouble(arg_offset)));
}
-void UnstartedRuntime::UnstartedObjectHashCode(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedObjectHashCode([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset);
result->SetI(obj->IdentityHashCode());
}
-void UnstartedRuntime::UnstartedDoubleDoubleToRawLongBits(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedDoubleDoubleToRawLongBits([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
double in = shadow_frame->GetVRegDouble(arg_offset);
result->SetJ(bit_cast<int64_t, double>(in));
}
@@ -1240,23 +1265,31 @@
UNREACHABLE();
}
-void UnstartedRuntime::UnstartedMemoryPeekByte(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMemoryPeekByte([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
UnstartedMemoryPeek(Primitive::kPrimByte, shadow_frame, result, arg_offset);
}
-void UnstartedRuntime::UnstartedMemoryPeekShort(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMemoryPeekShort([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
UnstartedMemoryPeek(Primitive::kPrimShort, shadow_frame, result, arg_offset);
}
-void UnstartedRuntime::UnstartedMemoryPeekInt(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMemoryPeekInt([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
UnstartedMemoryPeek(Primitive::kPrimInt, shadow_frame, result, arg_offset);
}
-void UnstartedRuntime::UnstartedMemoryPeekLong(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMemoryPeekLong([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset) {
UnstartedMemoryPeek(Primitive::kPrimLong, shadow_frame, result, arg_offset);
}
@@ -1309,14 +1342,18 @@
UNREACHABLE();
}
-void UnstartedRuntime::UnstartedMemoryPeekByteArray(
- Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) {
+void UnstartedRuntime::UnstartedMemoryPeekByteArray(Thread* self,
+ ShadowFrame* shadow_frame,
+ [[maybe_unused]] JValue* result,
+ size_t arg_offset) {
UnstartedMemoryPeekArray(Primitive::kPrimByte, self, shadow_frame, arg_offset);
}
// This allows reading the new style of String objects during compilation.
-void UnstartedRuntime::UnstartedStringGetCharsNoCheck(
- Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) {
+void UnstartedRuntime::UnstartedStringGetCharsNoCheck(Thread* self,
+ ShadowFrame* shadow_frame,
+ [[maybe_unused]] JValue* result,
+ size_t arg_offset) {
jint start = shadow_frame->GetVReg(arg_offset + 1);
jint end = shadow_frame->GetVReg(arg_offset + 2);
jint index = shadow_frame->GetVReg(arg_offset + 4);
@@ -1477,8 +1514,10 @@
// where we can predict the behavior (somewhat).
// Note: this is required (instead of lazy initialization) as these classes are used in the static
// initialization of other classes, so will *use* the value.
-void UnstartedRuntime::UnstartedRuntimeAvailableProcessors(
- Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedRuntimeAvailableProcessors(Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ [[maybe_unused]] size_t arg_offset) {
if (CheckCallers(shadow_frame, { "void java.util.concurrent.SynchronousQueue.<clinit>()" })) {
// SynchronousQueue really only separates between single- and multiprocessor case. Return
// 8 as a conservative upper approximation.
@@ -1628,8 +1667,10 @@
result->SetL(value);
}
-void UnstartedRuntime::UnstartedJdkUnsafePutObjectVolatile(
- Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset)
+void UnstartedRuntime::UnstartedJdkUnsafePutObjectVolatile(Thread* self,
+ ShadowFrame* shadow_frame,
+ [[maybe_unused]] JValue* result,
+ size_t arg_offset)
REQUIRES_SHARED(Locks::mutator_lock_) {
// Argument 0 is the Unsafe instance, skip.
mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset + 1);
@@ -1650,8 +1691,10 @@
}
}
-void UnstartedRuntime::UnstartedJdkUnsafePutOrderedObject(
- Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset)
+void UnstartedRuntime::UnstartedJdkUnsafePutOrderedObject(Thread* self,
+ ShadowFrame* shadow_frame,
+ [[maybe_unused]] JValue* result,
+ size_t arg_offset)
REQUIRES_SHARED(Locks::mutator_lock_) {
// Argument 0 is the Unsafe instance, skip.
mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset + 1);
@@ -1799,8 +1842,10 @@
}
}
-void UnstartedRuntime::UnstartedSystemIdentityHashCode(
- Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset)
+void UnstartedRuntime::UnstartedSystemIdentityHashCode([[maybe_unused]] Thread* self,
+ ShadowFrame* shadow_frame,
+ JValue* result,
+ size_t arg_offset)
REQUIRES_SHARED(Locks::mutator_lock_) {
mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset);
result->SetI((obj != nullptr) ? obj->IdentityHashCode() : 0);
@@ -1810,9 +1855,11 @@
// java.lang.invoke.VarHandle clinit. The clinit determines sets of
// available VarHandle accessors and these differ based on machine
// word size.
-void UnstartedRuntime::UnstartedJNIVMRuntimeIs64Bit(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+void UnstartedRuntime::UnstartedJNIVMRuntimeIs64Bit([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
jboolean is64bit = (pointer_size == PointerSize::k64) ? JNI_TRUE : JNI_FALSE;
result->SetZ(is64bit);
@@ -1820,8 +1867,8 @@
void UnstartedRuntime::UnstartedJNIVMRuntimeNewUnpaddedArray(
Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
uint32_t* args,
JValue* result) {
int32_t length = args[1];
@@ -1841,14 +1888,19 @@
}
void UnstartedRuntime::UnstartedJNIVMStackGetCallingClassLoader(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+ [[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
result->SetL(nullptr);
}
-void UnstartedRuntime::UnstartedJNIVMStackGetStackClass2(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+void UnstartedRuntime::UnstartedJNIVMStackGetStackClass2(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
NthCallerVisitor visitor(self, 3);
visitor.WalkStack();
if (visitor.caller != nullptr) {
@@ -1856,75 +1908,91 @@
}
}
-void UnstartedRuntime::UnstartedJNIMathLog(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIMathLog([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
JValue value;
value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]);
result->SetD(log(value.GetD()));
}
-void UnstartedRuntime::UnstartedJNIMathExp(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIMathExp([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
JValue value;
value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]);
result->SetD(exp(value.GetD()));
}
void UnstartedRuntime::UnstartedJNIAtomicLongVMSupportsCS8(
- Thread* self ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args ATTRIBUTE_UNUSED,
+ [[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
JValue* result) {
result->SetZ(QuasiAtomic::LongAtomicsUseMutexes(Runtime::Current()->GetInstructionSet())
? 0
: 1);
}
-void UnstartedRuntime::UnstartedJNIClassGetNameNative(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver,
- uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+void UnstartedRuntime::UnstartedJNIClassGetNameNative(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
StackHandleScope<1> hs(self);
result->SetL(mirror::Class::ComputeName(hs.NewHandle(receiver->AsClass())));
}
-void UnstartedRuntime::UnstartedJNIDoubleLongBitsToDouble(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIDoubleLongBitsToDouble([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
uint64_t long_input = args[0] | (static_cast<uint64_t>(args[1]) << 32);
result->SetD(bit_cast<double>(long_input));
}
-void UnstartedRuntime::UnstartedJNIFloatFloatToRawIntBits(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIFloatFloatToRawIntBits([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
result->SetI(args[0]);
}
-void UnstartedRuntime::UnstartedJNIFloatIntBitsToFloat(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIFloatIntBitsToFloat([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
result->SetI(args[0]);
}
-void UnstartedRuntime::UnstartedJNIObjectInternalClone(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver,
- uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+void UnstartedRuntime::UnstartedJNIObjectInternalClone(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
StackHandleScope<1> hs(self);
Handle<mirror::Object> h_receiver = hs.NewHandle(receiver);
result->SetL(mirror::Object::Clone(h_receiver, self));
}
-void UnstartedRuntime::UnstartedJNIObjectNotifyAll(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver,
- uint32_t* args ATTRIBUTE_UNUSED, JValue* result ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedJNIObjectNotifyAll(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ [[maybe_unused]] JValue* result) {
receiver->NotifyAll(self);
}
void UnstartedRuntime::UnstartedJNIStringCompareTo(Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
+ [[maybe_unused]] ArtMethod* method,
mirror::Object* receiver,
uint32_t* args,
JValue* result) {
@@ -1936,9 +2004,11 @@
result->SetI(receiver->AsString()->CompareTo(rhs->AsString()));
}
-void UnstartedRuntime::UnstartedJNIStringFillBytesLatin1(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver, uint32_t* args, JValue* ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedJNIStringFillBytesLatin1(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ uint32_t* args,
+ [[maybe_unused]] JValue*) {
StackHandleScope<2> hs(self);
Handle<mirror::String> h_receiver(hs.NewHandle(
reinterpret_cast<mirror::String*>(receiver)->AsString()));
@@ -1948,9 +2018,11 @@
h_receiver->FillBytesLatin1(h_buffer, index);
}
-void UnstartedRuntime::UnstartedJNIStringFillBytesUTF16(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver, uint32_t* args, JValue* ATTRIBUTE_UNUSED) {
+void UnstartedRuntime::UnstartedJNIStringFillBytesUTF16(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ uint32_t* args,
+ [[maybe_unused]] JValue*) {
StackHandleScope<2> hs(self);
Handle<mirror::String> h_receiver(hs.NewHandle(
reinterpret_cast<mirror::String*>(receiver)->AsString()));
@@ -1960,24 +2032,30 @@
h_receiver->FillBytesUTF16(h_buffer, index);
}
-void UnstartedRuntime::UnstartedJNIStringIntern(
- Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver,
- uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+void UnstartedRuntime::UnstartedJNIStringIntern([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
result->SetL(receiver->AsString()->Intern());
}
-void UnstartedRuntime::UnstartedJNIArrayCreateMultiArray(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIArrayCreateMultiArray(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
StackHandleScope<2> hs(self);
auto h_class(hs.NewHandle(reinterpret_cast<mirror::Class*>(args[0])->AsClass()));
auto h_dimensions(hs.NewHandle(reinterpret_cast<mirror::IntArray*>(args[1])->AsIntArray()));
result->SetL(mirror::Array::CreateMultiArray(self, h_class, h_dimensions));
}
-void UnstartedRuntime::UnstartedJNIArrayCreateObjectArray(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args, JValue* result) {
+void UnstartedRuntime::UnstartedJNIArrayCreateObjectArray(Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
int32_t length = static_cast<int32_t>(args[1]);
if (length < 0) {
ThrowNegativeArraySizeException(length);
@@ -1998,8 +2076,11 @@
}
void UnstartedRuntime::UnstartedJNIThrowableNativeFillInStackTrace(
- Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
+ Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
ScopedObjectAccessUnchecked soa(self);
ScopedLocalRef<jobject> stack_trace(self->GetJniEnv(), self->CreateInternalStackTrace(soa));
result->SetL(soa.Decode<mirror::Object>(stack_trace.get()));
@@ -2048,19 +2129,18 @@
UnstartedJNIJdkUnsafeGetArrayIndexScaleForComponentType(self, method, receiver, args, result);
}
-void UnstartedRuntime::UnstartedJNIJdkUnsafeAddressSize(
- Thread* self ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args ATTRIBUTE_UNUSED,
- JValue* result) {
+void UnstartedRuntime::UnstartedJNIJdkUnsafeAddressSize([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
result->SetI(sizeof(void*));
}
void UnstartedRuntime::UnstartedJNIJdkUnsafeCompareAndSwapInt(
Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
uint32_t* args,
JValue* result) {
ObjPtr<mirror::Object> obj = reinterpret_cast32<mirror::Object*>(args[0]);
@@ -2101,11 +2181,12 @@
UnstartedJNIJdkUnsafeCompareAndSwapInt(self, method, receiver, args, result);
}
-void UnstartedRuntime::UnstartedJNIJdkUnsafeGetIntVolatile(Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args,
- JValue* result) {
+void UnstartedRuntime::UnstartedJNIJdkUnsafeGetIntVolatile(
+ Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ JValue* result) {
ObjPtr<mirror::Object> obj = reinterpret_cast32<mirror::Object*>(args[0]);
if (obj == nullptr) {
AbortTransactionOrFail(self, "Unsafe.compareAndSwapIntVolatile with null object.");
@@ -2117,10 +2198,10 @@
}
void UnstartedRuntime::UnstartedJNIJdkUnsafePutObject(Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
- uint32_t* args,
- JValue* result ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
+ uint32_t* args,
+ [[maybe_unused]] JValue* result) {
ObjPtr<mirror::Object> obj = reinterpret_cast32<mirror::Object*>(args[0]);
if (obj == nullptr) {
AbortTransactionOrFail(self, "Unsafe.putObject with null object.");
@@ -2141,8 +2222,8 @@
void UnstartedRuntime::UnstartedJNIJdkUnsafeGetArrayBaseOffsetForComponentType(
Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
uint32_t* args,
JValue* result) {
ObjPtr<mirror::Object> component = reinterpret_cast32<mirror::Object*>(args[0]);
@@ -2156,8 +2237,8 @@
void UnstartedRuntime::UnstartedJNIJdkUnsafeGetArrayIndexScaleForComponentType(
Thread* self,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver ATTRIBUTE_UNUSED,
+ [[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] mirror::Object* receiver,
uint32_t* args,
JValue* result) {
ObjPtr<mirror::Object> component = reinterpret_cast32<mirror::Object*>(args[0]);
@@ -2169,23 +2250,21 @@
result->SetI(Primitive::ComponentSize(primitive_type));
}
-void UnstartedRuntime::UnstartedJNIFieldGetArtField(
- Thread* self ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver,
- uint32_t* args ATTRIBUTE_UNUSED,
- JValue* result) {
+void UnstartedRuntime::UnstartedJNIFieldGetArtField([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
ObjPtr<mirror::Field> field = ObjPtr<mirror::Field>::DownCast(receiver);
ArtField* art_field = field->GetArtField();
result->SetJ(reinterpret_cast<int64_t>(art_field));
}
-void UnstartedRuntime::UnstartedJNIFieldGetNameInternal(
- Thread* self ATTRIBUTE_UNUSED,
- ArtMethod* method ATTRIBUTE_UNUSED,
- mirror::Object* receiver,
- uint32_t* args ATTRIBUTE_UNUSED,
- JValue* result) {
+void UnstartedRuntime::UnstartedJNIFieldGetNameInternal([[maybe_unused]] Thread* self,
+ [[maybe_unused]] ArtMethod* method,
+ mirror::Object* receiver,
+ [[maybe_unused]] uint32_t* args,
+ JValue* result) {
ObjPtr<mirror::Field> field = ObjPtr<mirror::Field>::DownCast(receiver);
ArtField* art_field = field->GetArtField();
result->SetL(art_field->ResolveNameString());
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index b231cce..ea475b5 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -866,7 +866,7 @@
explicit JitDoneCompilingProfileTask(const std::vector<const DexFile*>& dex_files)
: dex_files_(dex_files) {}
- void Run(Thread* self ATTRIBUTE_UNUSED) override {
+ void Run([[maybe_unused]] Thread* self) override {
// Madvise DONTNEED dex files now that we're done compiling methods.
for (const DexFile* dex_file : dex_files_) {
if (IsAddressKnownBackedByFileOrShared(dex_file->Begin())) {
@@ -890,7 +890,7 @@
public:
JitZygoteDoneCompilingTask() {}
- void Run(Thread* self ATTRIBUTE_UNUSED) override {
+ void Run([[maybe_unused]] Thread* self) override {
DCHECK(Runtime::Current()->IsZygote());
Runtime::Current()->GetJit()->GetCodeCache()->GetZygoteMap()->SetCompilationState(
ZygoteCompilationState::kDone);
diff --git a/runtime/jit/jit_memory_region.cc b/runtime/jit/jit_memory_region.cc
index 410bf70..cbfd39a 100644
--- a/runtime/jit/jit_memory_region.cc
+++ b/runtime/jit/jit_memory_region.cc
@@ -590,8 +590,8 @@
return fd;
}
-bool JitMemoryRegion::ProtectZygoteMemory(int fd ATTRIBUTE_UNUSED,
- std::string* error_msg ATTRIBUTE_UNUSED) {
+bool JitMemoryRegion::ProtectZygoteMemory([[maybe_unused]] int fd,
+ [[maybe_unused]] std::string* error_msg) {
return true;
}
diff --git a/runtime/jit/jit_memory_region_test.cc b/runtime/jit/jit_memory_region_test.cc
index 2a79777..a77ea81 100644
--- a/runtime/jit/jit_memory_region_test.cc
+++ b/runtime/jit/jit_memory_region_test.cc
@@ -39,8 +39,7 @@
// These globals are only set in child processes.
void* gAddrToFaultOn = nullptr;
-[[noreturn]]
-void handler(int ATTRIBUTE_UNUSED, siginfo_t* info, void* ATTRIBUTE_UNUSED) {
+[[noreturn]] void handler([[maybe_unused]] int, siginfo_t* info, [[maybe_unused]] void*) {
CHECK_EQ(info->si_addr, gAddrToFaultOn);
exit(kReturnFromFault);
}
diff --git a/runtime/jni/check_jni.cc b/runtime/jni/check_jni.cc
index eb54f98..3dc9b9f 100644
--- a/runtime/jni/check_jni.cc
+++ b/runtime/jni/check_jni.cc
@@ -1617,8 +1617,10 @@
* Perform the array "release" operation, which may or may not copy data
* back into the managed heap, and may or may not release the underlying storage.
*/
- static void* ReleaseGuardedPACopy(const char* function_name, JNIEnv* env,
- jarray java_array ATTRIBUTE_UNUSED, void* embedded_buf,
+ static void* ReleaseGuardedPACopy(const char* function_name,
+ JNIEnv* env,
+ [[maybe_unused]] jarray java_array,
+ void* embedded_buf,
int mode) {
ScopedObjectAccess soa(env);
if (!GuardedCopy::Check(function_name, embedded_buf, true)) {
@@ -1635,7 +1637,6 @@
return original_ptr;
}
-
/*
* Free up the guard buffer, scrub it, and return the original pointer.
*/
diff --git a/runtime/jni/java_vm_ext_test.cc b/runtime/jni/java_vm_ext_test.cc
index cae33b5..e7295aa 100644
--- a/runtime/jni/java_vm_ext_test.cc
+++ b/runtime/jni/java_vm_ext_test.cc
@@ -62,7 +62,7 @@
static bool gSmallStack = false;
static bool gAsDaemon = false;
-static void* attach_current_thread_callback(void* arg ATTRIBUTE_UNUSED) {
+static void* attach_current_thread_callback([[maybe_unused]] void* arg) {
JavaVM* vms_buf[1];
jsize num_vms;
JNIEnv* env;
diff --git a/runtime/jni/jni_env_ext.cc b/runtime/jni/jni_env_ext.cc
index bef0fd3..fcf38ba 100644
--- a/runtime/jni/jni_env_ext.cc
+++ b/runtime/jni/jni_env_ext.cc
@@ -289,7 +289,7 @@
}
}
-void ThreadResetFunctionTable(Thread* thread, void* arg ATTRIBUTE_UNUSED)
+void ThreadResetFunctionTable(Thread* thread, [[maybe_unused]] void* arg)
REQUIRES(Locks::jni_function_table_lock_) {
JNIEnvExt* env = thread->GetJniEnv();
bool check_jni = env->IsCheckJniEnabled();
diff --git a/runtime/jni/jni_id_manager.cc b/runtime/jni/jni_id_manager.cc
index e556f61..5af1a78 100644
--- a/runtime/jni/jni_id_manager.cc
+++ b/runtime/jni/jni_id_manager.cc
@@ -100,7 +100,7 @@
REQUIRES_SHARED(Locks::mutator_lock_);
template <>
-bool ShouldReturnPointer(ObjPtr<mirror::Class> klass, ArtMethod* t ATTRIBUTE_UNUSED) {
+bool ShouldReturnPointer(ObjPtr<mirror::Class> klass, [[maybe_unused]] ArtMethod* t) {
ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
if (ext.IsNull()) {
return true;
@@ -176,7 +176,7 @@
size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtType* t, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
template <>
-size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtField* f, PointerSize ptr_size ATTRIBUTE_UNUSED) {
+size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtField* f, [[maybe_unused]] PointerSize ptr_size) {
return f->IsStatic() ? k->GetStaticFieldIdOffset(f) : k->GetInstanceFieldIdOffset(f);
}
template <>
@@ -208,7 +208,7 @@
template <typename ArtType>
bool CanUseIdArrays(ReflectiveHandle<ArtType> t) REQUIRES_SHARED(Locks::mutator_lock_);
template <>
-bool CanUseIdArrays(ReflectiveHandle<ArtField> t ATTRIBUTE_UNUSED) {
+bool CanUseIdArrays([[maybe_unused]] ReflectiveHandle<ArtField> t) {
return true;
}
template <>
@@ -264,7 +264,7 @@
}
template <>
size_t JniIdManager::GetLinearSearchStartId<ArtField>(
- ReflectiveHandle<ArtField> t ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] ReflectiveHandle<ArtField> t) {
return deferred_allocation_field_id_start_;
}
diff --git a/runtime/jni/jni_internal.cc b/runtime/jni/jni_internal.cc
index ad2efc5..71bed2e 100644
--- a/runtime/jni/jni_internal.cc
+++ b/runtime/jni/jni_internal.cc
@@ -162,7 +162,7 @@
NewStringUTFVisitor(const char* utf, size_t utf8_length, int32_t count, bool has_bad_char)
: utf_(utf), utf8_length_(utf8_length), count_(count), has_bad_char_(has_bad_char) {}
- void operator()(ObjPtr<mirror::Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<mirror::Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<mirror::String> string = ObjPtr<mirror::String>::DownCast(obj);
@@ -226,7 +226,7 @@
jsize GetUncompressedStringUTFLength(const uint16_t* chars, size_t length) {
jsize byte_count = 0;
ConvertUtf16ToUtf8<kUtfUseShortZero, kUtfUse4ByteSequence, kUtfReplaceBadSurrogates>(
- chars, length, [&](char c ATTRIBUTE_UNUSED) { ++byte_count; });
+ chars, length, [&]([[maybe_unused]] char c) { ++byte_count; });
return byte_count;
}
@@ -2830,7 +2830,7 @@
return static_cast<jlong>(WellKnownClasses::java_nio_Buffer_capacity->GetInt(buffer.Get()));
}
- static jobjectRefType GetObjectRefType(JNIEnv* env ATTRIBUTE_UNUSED, jobject java_object) {
+ static jobjectRefType GetObjectRefType([[maybe_unused]] JNIEnv* env, jobject java_object) {
if (java_object == nullptr) {
return JNIInvalidRefType;
}
diff --git a/runtime/jni/local_reference_table.h b/runtime/jni/local_reference_table.h
index 900e4c3..22fa4a9 100644
--- a/runtime/jni/local_reference_table.h
+++ b/runtime/jni/local_reference_table.h
@@ -333,7 +333,7 @@
void SetSegmentState(LRTSegmentState new_state);
- static Offset SegmentStateOffset(size_t pointer_size ATTRIBUTE_UNUSED) {
+ static Offset SegmentStateOffset([[maybe_unused]] size_t pointer_size) {
// Note: Currently segment_state_ is at offset 0. We're testing the expected value in
// jni_internal_test to make sure it stays correct. It is not OFFSETOF_MEMBER, as that
// is not pointer-size-safe.
diff --git a/runtime/metrics/reporter_test.cc b/runtime/metrics/reporter_test.cc
index 848a74e..a61f8be 100644
--- a/runtime/metrics/reporter_test.cc
+++ b/runtime/metrics/reporter_test.cc
@@ -65,10 +65,10 @@
current_report_->data.Put(counter_type, value);
}
- void ReportHistogram(DatumId histogram_type ATTRIBUTE_UNUSED,
- int64_t low_value ATTRIBUTE_UNUSED,
- int64_t high_value ATTRIBUTE_UNUSED,
- const std::vector<uint32_t>& buckets ATTRIBUTE_UNUSED) override {
+ void ReportHistogram([[maybe_unused]] DatumId histogram_type,
+ [[maybe_unused]] int64_t low_value,
+ [[maybe_unused]] int64_t high_value,
+ [[maybe_unused]] const std::vector<uint32_t>& buckets) override {
// TODO: nothing yet. We should implement and test histograms as well.
}
diff --git a/runtime/mirror/accessible_object.h b/runtime/mirror/accessible_object.h
index 7c0d91a..1e434a1 100644
--- a/runtime/mirror/accessible_object.h
+++ b/runtime/mirror/accessible_object.h
@@ -39,7 +39,7 @@
private:
// We only use the field indirectly using the FlagOffset() method.
- uint8_t flag_ ATTRIBUTE_UNUSED;
+ [[maybe_unused]] uint8_t flag_;
DISALLOW_IMPLICIT_CONSTRUCTORS(AccessibleObject);
};
diff --git a/runtime/mirror/array-alloc-inl.h b/runtime/mirror/array-alloc-inl.h
index c1e0175..32840d4 100644
--- a/runtime/mirror/array-alloc-inl.h
+++ b/runtime/mirror/array-alloc-inl.h
@@ -67,7 +67,7 @@
explicit SetLengthVisitor(int32_t length) : length_(length) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsArray as object is not yet in live bitmap or allocation stack.
ObjPtr<Array> array = ObjPtr<Array>::DownCast(obj);
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 5d64167..e7cfb92 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -140,10 +140,10 @@
// The number of array elements.
// We only use the field indirectly using the LengthOffset() method.
- int32_t length_ ATTRIBUTE_UNUSED;
+ [[maybe_unused]] int32_t length_;
// Marker for the data (used by generated code)
// We only use the field indirectly using the DataOffset() method.
- uint32_t first_element_[0] ATTRIBUTE_UNUSED;
+ [[maybe_unused]] uint32_t first_element_[0];
DISALLOW_IMPLICIT_CONSTRUCTORS(Array);
};
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 6458613..296eeed 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -1605,9 +1605,9 @@
class ReadBarrierOnNativeRootsVisitor {
public:
- void operator()(ObjPtr<Object> obj ATTRIBUTE_UNUSED,
- MemberOffset offset ATTRIBUTE_UNUSED,
- bool is_static ATTRIBUTE_UNUSED) const {}
+ void operator()([[maybe_unused]] ObjPtr<Object> obj,
+ [[maybe_unused]] MemberOffset offset,
+ [[maybe_unused]] bool is_static) const {}
void VisitRootIfNonNull(CompressedReference<Object>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1644,7 +1644,7 @@
copy_bytes_(copy_bytes), imt_(imt), pointer_size_(pointer_size) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
StackHandleScope<1> hs(self_);
Handle<mirror::Class> h_new_class_obj(hs.NewHandle(obj->AsClass()));
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index 197172c..8a7ab88 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -81,7 +81,7 @@
HeapReference<ClassLoader> parent_;
HeapReference<Object> proxyCache_;
// Native pointer to class table, need to zero this out when image writing.
- uint32_t padding_ ATTRIBUTE_UNUSED;
+ [[maybe_unused]] uint32_t padding_;
uint64_t allocator_;
uint64_t class_table_;
diff --git a/runtime/mirror/executable.h b/runtime/mirror/executable.h
index dc4ec95..079efc3 100644
--- a/runtime/mirror/executable.h
+++ b/runtime/mirror/executable.h
@@ -64,7 +64,7 @@
uint8_t has_real_parameter_data_;
// Padding required for matching alignment with the Java peer.
- uint8_t padding_[2] ATTRIBUTE_UNUSED;
+ [[maybe_unused]] uint8_t padding_[2];
HeapReference<mirror::Class> declaring_class_;
HeapReference<mirror::Class> declaring_class_of_overridden_method_;
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 5016c20..940b82d 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -66,9 +66,9 @@
}
// Unused since we don't copy class native roots.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
private:
const ObjPtr<Object> dest_obj_;
@@ -144,7 +144,7 @@
CopyObjectVisitor(Handle<Object>* orig, size_t num_bytes)
: orig_(orig), num_bytes_(num_bytes) {}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
Object::CopyObject(obj, orig_->Get(), num_bytes_);
}
diff --git a/runtime/mirror/string-alloc-inl.h b/runtime/mirror/string-alloc-inl.h
index cb2dcb2..9c2529c 100644
--- a/runtime/mirror/string-alloc-inl.h
+++ b/runtime/mirror/string-alloc-inl.h
@@ -41,7 +41,7 @@
explicit SetStringCountVisitor(int32_t count) : count_(count) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -61,7 +61,7 @@
: count_(count), src_array_(src_array), offset_(offset), high_byte_(high_byte) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -96,7 +96,7 @@
: count_(count), src_array_(src_array), offset_(offset) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -132,7 +132,7 @@
count_(count), src_array_(src_array), offset_(offset) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -163,7 +163,7 @@
count_(count), src_string_(src_string), offset_(offset) {
}
- void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+ void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
REQUIRES_SHARED(Locks::mutator_lock_) {
// Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc
index 9f0c216..f602f73 100644
--- a/runtime/native/dalvik_system_DexFile.cc
+++ b/runtime/native/dalvik_system_DexFile.cc
@@ -368,8 +368,8 @@
static jobject DexFile_openDexFileNative(JNIEnv* env,
jclass,
jstring javaSourceName,
- jstring javaOutputName ATTRIBUTE_UNUSED,
- jint flags ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jstring javaOutputName,
+ [[maybe_unused]] jint flags,
jobject class_loader,
jobjectArray dex_elements) {
ScopedUtfChars sourceName(env, javaSourceName);
@@ -758,8 +758,8 @@
}
static jboolean DexFile_isValidCompilerFilter(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED,
- jstring javaCompilerFilter) {
+ [[maybe_unused]] jclass javaDexFileClass,
+ jstring javaCompilerFilter) {
ScopedUtfChars compiler_filter(env, javaCompilerFilter);
if (env->ExceptionCheck()) {
return -1;
@@ -771,7 +771,7 @@
}
static jboolean DexFile_isProfileGuidedCompilerFilter(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass javaDexFileClass,
jstring javaCompilerFilter) {
ScopedUtfChars compiler_filter(env, javaCompilerFilter);
if (env->ExceptionCheck()) {
@@ -786,7 +786,7 @@
}
static jboolean DexFile_isVerifiedCompilerFilter(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass javaDexFileClass,
jstring javaCompilerFilter) {
ScopedUtfChars compiler_filter(env, javaCompilerFilter);
if (env->ExceptionCheck()) {
@@ -801,7 +801,7 @@
}
static jboolean DexFile_isOptimizedCompilerFilter(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass javaDexFileClass,
jstring javaCompilerFilter) {
ScopedUtfChars compiler_filter(env, javaCompilerFilter);
if (env->ExceptionCheck()) {
@@ -816,12 +816,12 @@
}
static jboolean DexFile_isReadOnlyJavaDclEnforced(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass javaDexFileClass) {
return (isReadOnlyJavaDclChecked() && isReadOnlyJavaDclEnforced(env)) ? JNI_TRUE : JNI_FALSE;
}
static jstring DexFile_getNonProfileGuidedCompilerFilter(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass javaDexFileClass,
jstring javaCompilerFilter) {
ScopedUtfChars compiler_filter(env, javaCompilerFilter);
if (env->ExceptionCheck()) {
@@ -846,7 +846,7 @@
}
static jstring DexFile_getSafeModeCompilerFilter(JNIEnv* env,
- jclass javeDexFileClass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass javaDexFileClass,
jstring javaCompilerFilter) {
ScopedUtfChars compiler_filter(env, javaCompilerFilter);
if (env->ExceptionCheck()) {
diff --git a/runtime/native/dalvik_system_VMDebug.cc b/runtime/native/dalvik_system_VMDebug.cc
index 3653a83..65d131a 100644
--- a/runtime/native/dalvik_system_VMDebug.cc
+++ b/runtime/native/dalvik_system_VMDebug.cc
@@ -90,7 +90,7 @@
static void VMDebug_startMethodTracingFd(JNIEnv* env,
jclass,
- jstring javaTraceFilename ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jstring javaTraceFilename,
jint javaFd,
jint bufferSize,
jint flags,
diff --git a/runtime/native/dalvik_system_VMRuntime.cc b/runtime/native/dalvik_system_VMRuntime.cc
index 9e2e8b9..1ffb7ce 100644
--- a/runtime/native/dalvik_system_VMRuntime.cc
+++ b/runtime/native/dalvik_system_VMRuntime.cc
@@ -237,8 +237,8 @@
return down_cast<JNIEnvExt*>(env)->GetVm()->IsCheckJniEnabled() ? JNI_TRUE : JNI_FALSE;
}
-static jint VMRuntime_getSdkVersionNative(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED,
+static jint VMRuntime_getSdkVersionNative([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass,
jint default_sdk_version) {
return android::base::GetIntProperty("ro.build.version.sdk",
default_sdk_version);
@@ -355,8 +355,7 @@
Runtime::Current()->GetHeap()->GetTaskProcessor()->RunAllTasks(Thread::ForEnv(env));
}
-static void VMRuntime_preloadDexCaches(JNIEnv* env ATTRIBUTE_UNUSED, jobject) {
-}
+static void VMRuntime_preloadDexCaches([[maybe_unused]] JNIEnv* env, jobject) {}
/*
* This is called by the framework after it loads a code path on behalf of the app.
@@ -364,7 +363,7 @@
* for more precise telemetry (e.g. is the split apk odex up to date?) and debugging.
*/
static void VMRuntime_registerAppInfo(JNIEnv* env,
- jclass clazz ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass clazz,
jstring package_name,
jstring cur_profile_file,
jstring ref_profile_file,
@@ -418,8 +417,8 @@
return env->NewStringUTF(GetInstructionSetString(kRuntimeISA));
}
-static void VMRuntime_setSystemDaemonThreadPriority(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+static void VMRuntime_setSystemDaemonThreadPriority([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
#ifdef ART_TARGET_ANDROID
Thread* self = Thread::Current();
DCHECK(self != nullptr);
@@ -435,14 +434,14 @@
#endif
}
-static void VMRuntime_setDedupeHiddenApiWarnings(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED,
+static void VMRuntime_setDedupeHiddenApiWarnings([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass,
jboolean dedupe) {
Runtime::Current()->SetDedupeHiddenApiWarnings(dedupe);
}
static void VMRuntime_setProcessPackageName(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jstring java_package_name) {
ScopedUtfChars package_name(env, java_package_name);
Runtime::Current()->SetProcessPackageName(package_name.c_str());
@@ -453,8 +452,7 @@
Runtime::Current()->SetProcessDataDirectory(data_dir.c_str());
}
-static void VMRuntime_bootCompleted(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+static void VMRuntime_bootCompleted([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) {
jit::Jit* jit = Runtime::Current()->GetJit();
if (jit != nullptr) {
jit->BootCompleted();
@@ -482,14 +480,14 @@
}
};
-static void VMRuntime_resetJitCounters(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+static void VMRuntime_resetJitCounters(JNIEnv* env, [[maybe_unused]] jclass klass) {
ScopedObjectAccess soa(env);
ClearJitCountersVisitor visitor;
Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}
static jboolean VMRuntime_isValidClassLoaderContext(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jstring jencoded_class_loader_context) {
if (UNLIKELY(jencoded_class_loader_context == nullptr)) {
ScopedFastNativeObjectAccess soa(env);
@@ -500,7 +498,7 @@
return ClassLoaderContext::IsValidEncoding(encoded_class_loader_context.c_str());
}
-static jobject VMRuntime_getBaseApkOptimizationInfo(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+static jobject VMRuntime_getBaseApkOptimizationInfo(JNIEnv* env, [[maybe_unused]] jclass klass) {
AppInfo* app_info = Runtime::Current()->GetAppInfo();
DCHECK(app_info != nullptr);
diff --git a/runtime/native/dalvik_system_ZygoteHooks.cc b/runtime/native/dalvik_system_ZygoteHooks.cc
index 3c73cc5..5ea6d3f 100644
--- a/runtime/native/dalvik_system_ZygoteHooks.cc
+++ b/runtime/native/dalvik_system_ZygoteHooks.cc
@@ -266,8 +266,8 @@
Runtime::Current()->PostZygoteFork();
}
-static void ZygoteHooks_nativePostForkSystemServer(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED,
+static void ZygoteHooks_nativePostForkSystemServer([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass,
jint runtime_flags) {
// Reload the current flags first. In case we need to take actions based on them.
Runtime::Current()->ReloadAllFlags(__FUNCTION__);
@@ -441,18 +441,18 @@
}
}
-static void ZygoteHooks_startZygoteNoThreadCreation(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+static void ZygoteHooks_startZygoteNoThreadCreation([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
Runtime::Current()->SetZygoteNoThreadSection(true);
}
-static void ZygoteHooks_stopZygoteNoThreadCreation(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+static void ZygoteHooks_stopZygoteNoThreadCreation([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
Runtime::Current()->SetZygoteNoThreadSection(false);
}
-static jboolean ZygoteHooks_nativeZygoteLongSuspendOk(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+static jboolean ZygoteHooks_nativeZygoteLongSuspendOk([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
// Indefinite thread suspensions are not OK if we're supposed to be JIT-compiling for other
// processes. We only care about JIT compilation that affects other processes. The zygote
// itself doesn't run appreciable amounts of Java code when running single-threaded, so
@@ -464,7 +464,6 @@
return (isJitZygote || explicitlyDisabled) ? JNI_FALSE : JNI_TRUE;
}
-
static JNINativeMethod gMethods[] = {
NATIVE_METHOD(ZygoteHooks, nativePreFork, "()J"),
NATIVE_METHOD(ZygoteHooks, nativePostZygoteFork, "()V"),
diff --git a/runtime/native/java_lang_reflect_Constructor.cc b/runtime/native/java_lang_reflect_Constructor.cc
index 4b2cc43..98afddc 100644
--- a/runtime/native/java_lang_reflect_Constructor.cc
+++ b/runtime/native/java_lang_reflect_Constructor.cc
@@ -120,11 +120,13 @@
return javaReceiver;
}
-static jobject Constructor_newInstanceFromSerialization(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED,
- jclass ctorClass, jclass allocClass) {
- jmethodID ctor = env->GetMethodID(ctorClass, "<init>", "()V");
- DCHECK(ctor != nullptr);
- return env->NewObject(allocClass, ctor);
+static jobject Constructor_newInstanceFromSerialization(JNIEnv* env,
+ [[maybe_unused]] jclass unused,
+ jclass ctorClass,
+ jclass allocClass) {
+ jmethodID ctor = env->GetMethodID(ctorClass, "<init>", "()V");
+ DCHECK(ctor != nullptr);
+ return env->NewObject(allocClass, ctor);
}
static JNINativeMethod gMethods[] = {
diff --git a/runtime/native/jdk_internal_misc_Unsafe.cc b/runtime/native/jdk_internal_misc_Unsafe.cc
index 6e2f558..9b2021d 100644
--- a/runtime/native/jdk_internal_misc_Unsafe.cc
+++ b/runtime/native/jdk_internal_misc_Unsafe.cc
@@ -261,11 +261,11 @@
return Primitive::ComponentSize(primitive_type);
}
-static jint Unsafe_addressSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) {
+static jint Unsafe_addressSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) {
return sizeof(void*);
}
-static jint Unsafe_pageSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) {
+static jint Unsafe_pageSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) {
return sysconf(_SC_PAGESIZE);
}
@@ -288,73 +288,80 @@
return reinterpret_cast<uintptr_t>(mem);
}
-static void Unsafe_freeMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static void Unsafe_freeMemory([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
free(reinterpret_cast<void*>(static_cast<uintptr_t>(address)));
}
-static void Unsafe_setMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong bytes, jbyte value) {
+static void Unsafe_setMemory(
+ [[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong bytes, jbyte value) {
memset(reinterpret_cast<void*>(static_cast<uintptr_t>(address)), value, bytes);
}
-static jbyte Unsafe_getByteJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jbyte Unsafe_getByteJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jbyte*>(address);
}
-static void Unsafe_putByteJB(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jbyte value) {
+static void Unsafe_putByteJB([[maybe_unused]] JNIEnv* env, jobject, jlong address, jbyte value) {
*reinterpret_cast<jbyte*>(address) = value;
}
-static jshort Unsafe_getShortJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jshort Unsafe_getShortJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jshort*>(address);
}
-static void Unsafe_putShortJS(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jshort value) {
+static void Unsafe_putShortJS([[maybe_unused]] JNIEnv* env, jobject, jlong address, jshort value) {
*reinterpret_cast<jshort*>(address) = value;
}
-static jchar Unsafe_getCharJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jchar Unsafe_getCharJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jchar*>(address);
}
-static void Unsafe_putCharJC(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jchar value) {
+static void Unsafe_putCharJC([[maybe_unused]] JNIEnv* env, jobject, jlong address, jchar value) {
*reinterpret_cast<jchar*>(address) = value;
}
-static jint Unsafe_getIntJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jint Unsafe_getIntJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jint*>(address);
}
-static void Unsafe_putIntJI(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jint value) {
+static void Unsafe_putIntJI([[maybe_unused]] JNIEnv* env, jobject, jlong address, jint value) {
*reinterpret_cast<jint*>(address) = value;
}
-static jlong Unsafe_getLongJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jlong Unsafe_getLongJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jlong*>(address);
}
-static void Unsafe_putLongJJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong value) {
+static void Unsafe_putLongJJ([[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong value) {
*reinterpret_cast<jlong*>(address) = value;
}
-static jfloat Unsafe_getFloatJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jfloat Unsafe_getFloatJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jfloat*>(address);
}
-static void Unsafe_putFloatJF(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jfloat value) {
+static void Unsafe_putFloatJF([[maybe_unused]] JNIEnv* env, jobject, jlong address, jfloat value) {
*reinterpret_cast<jfloat*>(address) = value;
}
-static jdouble Unsafe_getDoubleJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jdouble Unsafe_getDoubleJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jdouble*>(address);
}
-static void Unsafe_putDoubleJD(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jdouble value) {
+static void Unsafe_putDoubleJD([[maybe_unused]] JNIEnv* env,
+ jobject,
+ jlong address,
+ jdouble value) {
*reinterpret_cast<jdouble*>(address) = value;
}
-static void Unsafe_copyMemory0(JNIEnv *env, jobject unsafe ATTRIBUTE_UNUSED,
- jobject srcObj, jlong srcOffset,
- jobject dstObj, jlong dstOffset,
- jlong size) {
+static void Unsafe_copyMemory0(JNIEnv* env,
+ [[maybe_unused]] jobject unsafe,
+ jobject srcObj,
+ jlong srcOffset,
+ jobject dstObj,
+ jlong dstOffset,
+ jlong size) {
ScopedFastNativeObjectAccess soa(env);
if (size == 0) {
return;
diff --git a/runtime/native/libcore_util_CharsetUtils.cc b/runtime/native/libcore_util_CharsetUtils.cc
index c53fd6e..46f8993 100644
--- a/runtime/native/libcore_util_CharsetUtils.cc
+++ b/runtime/native/libcore_util_CharsetUtils.cc
@@ -113,7 +113,7 @@
utf8_length = length;
} else {
const uint16_t* utf16 = string->GetValue() + offset;
- auto count_length = [&utf8_length](jbyte c ATTRIBUTE_UNUSED) ALWAYS_INLINE { ++utf8_length; };
+ auto count_length = [&utf8_length]([[maybe_unused]] jbyte c) ALWAYS_INLINE { ++utf8_length; };
ConvertUtf16ToUtf8</*kUseShortZero=*/ true,
/*kUse4ByteSequence=*/ true,
/*kReplaceBadSurrogates=*/ true>(utf16, length, count_length);
diff --git a/runtime/native/sun_misc_Unsafe.cc b/runtime/native/sun_misc_Unsafe.cc
index 8a203ce..f1e47ee 100644
--- a/runtime/native/sun_misc_Unsafe.cc
+++ b/runtime/native/sun_misc_Unsafe.cc
@@ -219,11 +219,11 @@
return Primitive::ComponentSize(primitive_type);
}
-static jint Unsafe_addressSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) {
+static jint Unsafe_addressSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) {
return sizeof(void*);
}
-static jint Unsafe_pageSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) {
+static jint Unsafe_pageSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) {
return sysconf(_SC_PAGESIZE);
}
@@ -242,71 +242,75 @@
return (uintptr_t) mem;
}
-static void Unsafe_freeMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static void Unsafe_freeMemory([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
free(reinterpret_cast<void*>(static_cast<uintptr_t>(address)));
}
-static void Unsafe_setMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong bytes, jbyte value) {
+static void Unsafe_setMemory(
+ [[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong bytes, jbyte value) {
memset(reinterpret_cast<void*>(static_cast<uintptr_t>(address)), value, bytes);
}
-static jbyte Unsafe_getByteJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jbyte Unsafe_getByteJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jbyte*>(address);
}
-static void Unsafe_putByteJB(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jbyte value) {
+static void Unsafe_putByteJB([[maybe_unused]] JNIEnv* env, jobject, jlong address, jbyte value) {
*reinterpret_cast<jbyte*>(address) = value;
}
-static jshort Unsafe_getShortJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jshort Unsafe_getShortJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jshort*>(address);
}
-static void Unsafe_putShortJS(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jshort value) {
+static void Unsafe_putShortJS([[maybe_unused]] JNIEnv* env, jobject, jlong address, jshort value) {
*reinterpret_cast<jshort*>(address) = value;
}
-static jchar Unsafe_getCharJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jchar Unsafe_getCharJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jchar*>(address);
}
-static void Unsafe_putCharJC(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jchar value) {
+static void Unsafe_putCharJC([[maybe_unused]] JNIEnv* env, jobject, jlong address, jchar value) {
*reinterpret_cast<jchar*>(address) = value;
}
-static jint Unsafe_getIntJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jint Unsafe_getIntJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jint*>(address);
}
-static void Unsafe_putIntJI(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jint value) {
+static void Unsafe_putIntJI([[maybe_unused]] JNIEnv* env, jobject, jlong address, jint value) {
*reinterpret_cast<jint*>(address) = value;
}
-static jlong Unsafe_getLongJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jlong Unsafe_getLongJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jlong*>(address);
}
-static void Unsafe_putLongJJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong value) {
+static void Unsafe_putLongJJ([[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong value) {
*reinterpret_cast<jlong*>(address) = value;
}
-static jfloat Unsafe_getFloatJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jfloat Unsafe_getFloatJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jfloat*>(address);
}
-static void Unsafe_putFloatJF(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jfloat value) {
+static void Unsafe_putFloatJF([[maybe_unused]] JNIEnv* env, jobject, jlong address, jfloat value) {
*reinterpret_cast<jfloat*>(address) = value;
}
-static jdouble Unsafe_getDoubleJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) {
+static jdouble Unsafe_getDoubleJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) {
return *reinterpret_cast<jdouble*>(address);
}
-static void Unsafe_putDoubleJD(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jdouble value) {
+static void Unsafe_putDoubleJD([[maybe_unused]] JNIEnv* env,
+ jobject,
+ jlong address,
+ jdouble value) {
*reinterpret_cast<jdouble*>(address) = value;
}
-static void Unsafe_copyMemory(JNIEnv *env, jobject unsafe ATTRIBUTE_UNUSED, jlong src,
- jlong dst, jlong size) {
+static void Unsafe_copyMemory(
+ JNIEnv* env, [[maybe_unused]] jobject unsafe, jlong src, jlong dst, jlong size) {
if (size == 0) {
return;
}
@@ -347,8 +351,8 @@
}
}
-static void Unsafe_copyMemoryToPrimitiveArray(JNIEnv *env,
- jobject unsafe ATTRIBUTE_UNUSED,
+static void Unsafe_copyMemoryToPrimitiveArray(JNIEnv* env,
+ [[maybe_unused]] jobject unsafe,
jlong srcAddr,
jobject dstObj,
jlong dstOffset,
@@ -382,8 +386,8 @@
}
}
-static void Unsafe_copyMemoryFromPrimitiveArray(JNIEnv *env,
- jobject unsafe ATTRIBUTE_UNUSED,
+static void Unsafe_copyMemoryFromPrimitiveArray(JNIEnv* env,
+ [[maybe_unused]] jobject unsafe,
jobject srcObj,
jlong srcOffset,
jlong dstAddr,
diff --git a/runtime/native_stack_dump.cc b/runtime/native_stack_dump.cc
index d6a0fae..bda912e 100644
--- a/runtime/native_stack_dump.cc
+++ b/runtime/native_stack_dump.cc
@@ -431,22 +431,20 @@
#elif defined(__APPLE__)
-void DumpNativeStack(std::ostream& os ATTRIBUTE_UNUSED,
- pid_t tid ATTRIBUTE_UNUSED,
- const char* prefix ATTRIBUTE_UNUSED,
- ArtMethod* current_method ATTRIBUTE_UNUSED,
- void* ucontext_ptr ATTRIBUTE_UNUSED,
- bool skip_frames ATTRIBUTE_UNUSED) {
-}
+void DumpNativeStack([[maybe_unused]] std::ostream& os,
+ [[maybe_unused]] pid_t tid,
+ [[maybe_unused]] const char* prefix,
+ [[maybe_unused]] ArtMethod* current_method,
+ [[maybe_unused]] void* ucontext_ptr,
+ [[maybe_unused]] bool skip_frames) {}
-void DumpNativeStack(std::ostream& os ATTRIBUTE_UNUSED,
- unwindstack::AndroidLocalUnwinder& existing_map ATTRIBUTE_UNUSED,
- pid_t tid ATTRIBUTE_UNUSED,
- const char* prefix ATTRIBUTE_UNUSED,
- ArtMethod* current_method ATTRIBUTE_UNUSED,
- void* ucontext_ptr ATTRIBUTE_UNUSED,
- bool skip_frames ATTRIBUTE_UNUSED) {
-}
+void DumpNativeStack([[maybe_unused]] std::ostream& os,
+ [[maybe_unused]] unwindstack::AndroidLocalUnwinder& existing_map,
+ [[maybe_unused]] pid_t tid,
+ [[maybe_unused]] const char* prefix,
+ [[maybe_unused]] ArtMethod* current_method,
+ [[maybe_unused]] void* ucontext_ptr,
+ [[maybe_unused]] bool skip_frames) {}
#else
#error "Unsupported architecture for native stack dumps."
diff --git a/runtime/noop_compiler_callbacks.h b/runtime/noop_compiler_callbacks.h
index aed0014..1e4e701 100644
--- a/runtime/noop_compiler_callbacks.h
+++ b/runtime/noop_compiler_callbacks.h
@@ -26,9 +26,9 @@
NoopCompilerCallbacks() : CompilerCallbacks(CompilerCallbacks::CallbackMode::kCompileApp) {}
~NoopCompilerCallbacks() {}
- void AddUncompilableMethod(MethodReference ref ATTRIBUTE_UNUSED) override {}
- void AddUncompilableClass(ClassReference ref ATTRIBUTE_UNUSED) override {}
- void ClassRejected(ClassReference ref ATTRIBUTE_UNUSED) override {}
+ void AddUncompilableMethod([[maybe_unused]] MethodReference ref) override {}
+ void AddUncompilableClass([[maybe_unused]] ClassReference ref) override {}
+ void ClassRejected([[maybe_unused]] ClassReference ref) override {}
verifier::VerifierDeps* GetVerifierDeps() const override { return nullptr; }
diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc
index 5f74584..c75a9ec 100644
--- a/runtime/oat_file.cc
+++ b/runtime/oat_file.cc
@@ -1159,12 +1159,12 @@
/*inout*/MemMap* reservation, // Where to load if not null.
/*out*/std::string* error_msg) override;
- bool Load(int oat_fd ATTRIBUTE_UNUSED,
- bool writable ATTRIBUTE_UNUSED,
- bool executable ATTRIBUTE_UNUSED,
- bool low_4gb ATTRIBUTE_UNUSED,
- /*inout*/MemMap* reservation ATTRIBUTE_UNUSED,
- /*out*/std::string* error_msg ATTRIBUTE_UNUSED) override {
+ bool Load([[maybe_unused]] int oat_fd,
+ [[maybe_unused]] bool writable,
+ [[maybe_unused]] bool executable,
+ [[maybe_unused]] bool low_4gb,
+ [[maybe_unused]] /*inout*/ MemMap* reservation,
+ [[maybe_unused]] /*out*/ std::string* error_msg) override {
return false;
}
@@ -1211,8 +1211,8 @@
#else
// Count the entries in dl_iterate_phdr we get at this point in time.
struct dl_iterate_context {
- static int callback(dl_phdr_info* info ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED,
+ static int callback([[maybe_unused]] dl_phdr_info* info,
+ [[maybe_unused]] size_t size,
void* data) {
reinterpret_cast<dl_iterate_context*>(data)->count++;
return 0; // Continue iteration.
@@ -1335,7 +1335,7 @@
if (reservation != nullptr && dlopen_handle_ != nullptr) {
// Find used pages from the reservation.
struct dl_iterate_context {
- static int callback(dl_phdr_info* info, size_t size ATTRIBUTE_UNUSED, void* data) {
+ static int callback(dl_phdr_info* info, [[maybe_unused]] size_t size, void* data) {
auto* context = reinterpret_cast<dl_iterate_context*>(data);
static_assert(std::is_same<Elf32_Half, Elf64_Half>::value, "Half must match");
using Elf_Half = Elf64_Half;
@@ -1433,7 +1433,7 @@
size_t memsz;
};
struct dl_iterate_context {
- static int callback(dl_phdr_info* info, size_t size ATTRIBUTE_UNUSED, void* data) {
+ static int callback(dl_phdr_info* info, [[maybe_unused]] size_t size, void* data) {
auto* context = reinterpret_cast<dl_iterate_context*>(data);
static_assert(std::is_same<Elf32_Half, Elf64_Half>::value, "Half must match");
using Elf_Half = Elf64_Half;
@@ -1597,8 +1597,7 @@
/*inout*/MemMap* reservation, // Where to load if not null.
/*out*/std::string* error_msg) override;
- void PreSetup(const std::string& elf_filename ATTRIBUTE_UNUSED) override {
- }
+ void PreSetup([[maybe_unused]] const std::string& elf_filename) override {}
private:
bool ElfFileOpen(File* file,
@@ -1853,29 +1852,29 @@
protected:
void PreLoad() override {}
- bool Load(const std::string& elf_filename ATTRIBUTE_UNUSED,
- bool writable ATTRIBUTE_UNUSED,
- bool executable ATTRIBUTE_UNUSED,
- bool low_4gb ATTRIBUTE_UNUSED,
- MemMap* reservation ATTRIBUTE_UNUSED,
- std::string* error_msg ATTRIBUTE_UNUSED) override {
+ bool Load([[maybe_unused]] const std::string& elf_filename,
+ [[maybe_unused]] bool writable,
+ [[maybe_unused]] bool executable,
+ [[maybe_unused]] bool low_4gb,
+ [[maybe_unused]] MemMap* reservation,
+ [[maybe_unused]] std::string* error_msg) override {
LOG(FATAL) << "Unsupported";
UNREACHABLE();
}
- bool Load(int oat_fd ATTRIBUTE_UNUSED,
- bool writable ATTRIBUTE_UNUSED,
- bool executable ATTRIBUTE_UNUSED,
- bool low_4gb ATTRIBUTE_UNUSED,
- MemMap* reservation ATTRIBUTE_UNUSED,
- std::string* error_msg ATTRIBUTE_UNUSED) override {
+ bool Load([[maybe_unused]] int oat_fd,
+ [[maybe_unused]] bool writable,
+ [[maybe_unused]] bool executable,
+ [[maybe_unused]] bool low_4gb,
+ [[maybe_unused]] MemMap* reservation,
+ [[maybe_unused]] std::string* error_msg) override {
LOG(FATAL) << "Unsupported";
UNREACHABLE();
}
- void PreSetup(const std::string& elf_filename ATTRIBUTE_UNUSED) override {}
+ void PreSetup([[maybe_unused]] const std::string& elf_filename) override {}
- const uint8_t* FindDynamicSymbolAddress(const std::string& symbol_name ATTRIBUTE_UNUSED,
+ const uint8_t* FindDynamicSymbolAddress([[maybe_unused]] const std::string& symbol_name,
std::string* error_msg) const override {
*error_msg = "Unsupported";
return nullptr;
diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc
index 56d4c70..1a5c57f 100644
--- a/runtime/oat_file_assistant_test.cc
+++ b/runtime/oat_file_assistant_test.cc
@@ -1504,7 +1504,7 @@
lock_(lock),
loaded_oat_file_(nullptr) {}
- void Run(Thread* self ATTRIBUTE_UNUSED) override {
+ void Run([[maybe_unused]] Thread* self) override {
// Load the dex files, and save a pointer to the loaded oat file, so that
// we can verify only one oat file was loaded for the dex location.
std::vector<std::unique_ptr<const DexFile>> dex_files;
diff --git a/runtime/runtime_callbacks_test.cc b/runtime/runtime_callbacks_test.cc
index 6f0b8a1..2ba73cf 100644
--- a/runtime/runtime_callbacks_test.cc
+++ b/runtime/runtime_callbacks_test.cc
@@ -88,7 +88,7 @@
class ThreadLifecycleCallbackRuntimeCallbacksTest : public RuntimeCallbacksTest {
public:
- static void* PthreadsCallback(void* arg ATTRIBUTE_UNUSED) {
+ static void* PthreadsCallback([[maybe_unused]] void* arg) {
// Attach.
Runtime* runtime = Runtime::Current();
CHECK(runtime->AttachCurrentThread("ThreadLifecycle test thread", true, nullptr, false));
@@ -260,12 +260,12 @@
struct Callback : public ClassLoadCallback {
void ClassPreDefine(const char* descriptor,
- Handle<mirror::Class> klass ATTRIBUTE_UNUSED,
- Handle<mirror::ClassLoader> class_loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] Handle<mirror::Class> klass,
+ [[maybe_unused]] Handle<mirror::ClassLoader> class_loader,
const DexFile& initial_dex_file,
- const dex::ClassDef& initial_class_def ATTRIBUTE_UNUSED,
- /*out*/DexFile const** final_dex_file ATTRIBUTE_UNUSED,
- /*out*/dex::ClassDef const** final_class_def ATTRIBUTE_UNUSED) override
+ [[maybe_unused]] const dex::ClassDef& initial_class_def,
+ [[maybe_unused]] /*out*/ DexFile const** final_dex_file,
+ [[maybe_unused]] /*out*/ dex::ClassDef const** final_class_def) override
REQUIRES_SHARED(Locks::mutator_lock_) {
const std::string& location = initial_dex_file.GetLocation();
std::string event =
@@ -468,20 +468,20 @@
ref_ = { &k->GetDexFile(), k->GetDexClassDefIndex() };
}
- void MonitorContendedLocking(Monitor* mon ATTRIBUTE_UNUSED) override
- REQUIRES_SHARED(Locks::mutator_lock_) { }
+ void MonitorContendedLocking([[maybe_unused]] Monitor* mon) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {}
- void MonitorContendedLocked(Monitor* mon ATTRIBUTE_UNUSED) override
- REQUIRES_SHARED(Locks::mutator_lock_) { }
+ void MonitorContendedLocked([[maybe_unused]] Monitor* mon) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {}
- void ObjectWaitStart(Handle<mirror::Object> obj, int64_t millis ATTRIBUTE_UNUSED) override
+ void ObjectWaitStart(Handle<mirror::Object> obj, [[maybe_unused]] int64_t millis) override
REQUIRES_SHARED(Locks::mutator_lock_) {
if (IsInterestingObject(obj.Get())) {
saw_wait_start_ = true;
}
}
- void MonitorWaitFinished(Monitor* m, bool timed_out ATTRIBUTE_UNUSED) override
+ void MonitorWaitFinished(Monitor* m, [[maybe_unused]] bool timed_out) override
REQUIRES_SHARED(Locks::mutator_lock_) {
if (IsInterestingObject(m->GetObject())) {
saw_wait_finished_ = true;
diff --git a/runtime/runtime_image.cc b/runtime/runtime_image.cc
index f41d4c9..9be1d5e 100644
--- a/runtime/runtime_image.cc
+++ b/runtime/runtime_image.cc
@@ -668,7 +668,7 @@
explicit NativePointerVisitor(RuntimeImageHelper* helper) : helper_(helper) {}
template <typename T>
- T* operator()(T* ptr, void** dest_addr ATTRIBUTE_UNUSED) const {
+ T* operator()(T* ptr, [[maybe_unused]] void** dest_addr) const {
return helper_->NativeLocationInImage(ptr, /* must_have_relocation= */ true);
}
@@ -1186,11 +1186,11 @@
: image_(image), copy_offset_(copy_offset) {}
// We do not visit native roots. These are handled with other logic.
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
- const {
+ void VisitRootIfNonNull(
+ [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
LOG(FATAL) << "UNREACHABLE";
}
- void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
+ void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {
LOG(FATAL) << "UNREACHABLE";
}
@@ -1209,9 +1209,8 @@
}
// java.lang.ref.Reference visitor.
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Reference> ref) const
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass,
+ ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
}
diff --git a/runtime/startup_completed_task.cc b/runtime/startup_completed_task.cc
index 9358d48..a9a06bb 100644
--- a/runtime/startup_completed_task.cc
+++ b/runtime/startup_completed_task.cc
@@ -82,7 +82,7 @@
// - accessing the image space metadata section when we madvise it
// - accessing dex caches when we free them
static struct EmptyClosure : Closure {
- void Run(Thread* thread ATTRIBUTE_UNUSED) override {}
+ void Run([[maybe_unused]] Thread* thread) override {}
} closure;
runtime->GetThreadList()->RunCheckpoint(&closure);
diff --git a/runtime/string_builder_append.cc b/runtime/string_builder_append.cc
index 0083b91..2071733 100644
--- a/runtime/string_builder_append.cc
+++ b/runtime/string_builder_append.cc
@@ -492,7 +492,7 @@
}
inline void StringBuilderAppend::Builder::operator()(ObjPtr<mirror::Object> obj,
- size_t usable_size ATTRIBUTE_UNUSED) const {
+ [[maybe_unused]] size_t usable_size) const {
ObjPtr<mirror::String> new_string = ObjPtr<mirror::String>::DownCast(obj);
new_string->SetCount(length_with_flag_);
if (mirror::String::IsCompressed(length_with_flag_)) {
diff --git a/runtime/subtype_check_test.cc b/runtime/subtype_check_test.cc
index 719e5d9..5960bcc 100644
--- a/runtime/subtype_check_test.cc
+++ b/runtime/subtype_check_test.cc
@@ -89,8 +89,8 @@
bool CasField32(art::MemberOffset offset,
int32_t old_value,
int32_t new_value,
- CASMode mode ATTRIBUTE_UNUSED,
- std::memory_order memory_order ATTRIBUTE_UNUSED)
+ [[maybe_unused]] CASMode mode,
+ [[maybe_unused]] std::memory_order memory_order)
REQUIRES_SHARED(Locks::mutator_lock_) {
UNUSED(offset);
if (old_value == GetField32Volatile(offset)) {
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 6b1934c..00a1468 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -810,7 +810,7 @@
// Keep space uninitialized as it can overflow the stack otherwise (should Clang actually
// auto-initialize this local variable).
volatile char space[kPageSize - (kAsanMultiplier * 256)] __attribute__((uninitialized));
- char sink ATTRIBUTE_UNUSED = space[zero]; // NOLINT
+ [[maybe_unused]] char sink = space[zero];
// Remove tag from the pointer. Nop in non-hwasan builds.
uintptr_t addr = reinterpret_cast<uintptr_t>(
__hwasan_tag_pointer != nullptr ? __hwasan_tag_pointer(space, 0) : space);
@@ -2148,8 +2148,7 @@
static constexpr size_t kMaxRepetition = 3u;
- VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr ATTRIBUTE_UNUSED)
- override
+ VisitMethodResult StartMethod(ArtMethod* m, [[maybe_unused]] size_t frame_nr) override
REQUIRES_SHARED(Locks::mutator_lock_) {
m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize);
ObjPtr<mirror::DexCache> dex_cache = m->GetDexCache();
@@ -2194,12 +2193,11 @@
return VisitMethodResult::kContinueMethod;
}
- VisitMethodResult EndMethod(ArtMethod* m ATTRIBUTE_UNUSED) override {
+ VisitMethodResult EndMethod([[maybe_unused]] ArtMethod* m) override {
return VisitMethodResult::kContinueMethod;
}
- void VisitWaitingObject(ObjPtr<mirror::Object> obj, ThreadState state ATTRIBUTE_UNUSED)
- override
+ void VisitWaitingObject(ObjPtr<mirror::Object> obj, [[maybe_unused]] ThreadState state) override
REQUIRES_SHARED(Locks::mutator_lock_) {
PrintObject(obj, " - waiting on ", ThreadList::kInvalidThreadId);
}
@@ -2531,8 +2529,8 @@
explicit MonitorExitVisitor(Thread* self) : self_(self) { }
// NO_THREAD_SAFETY_ANALYSIS due to MonitorExit.
- void VisitRoot(mirror::Object* entered_monitor, const RootInfo& info ATTRIBUTE_UNUSED)
- override NO_THREAD_SAFETY_ANALYSIS {
+ void VisitRoot(mirror::Object* entered_monitor,
+ [[maybe_unused]] const RootInfo& info) override NO_THREAD_SAFETY_ANALYSIS {
if (self_->HoldsLock(entered_monitor)) {
LOG(WARNING) << "Calling MonitorExit on object "
<< entered_monitor << " (" << entered_monitor->PrettyTypeOf() << ")"
@@ -3345,8 +3343,7 @@
soaa_(soaa_in) {}
protected:
- VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr ATTRIBUTE_UNUSED)
- override
+ VisitMethodResult StartMethod(ArtMethod* m, [[maybe_unused]] size_t frame_nr) override
REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::StackTraceElement> obj = CreateStackTraceElement(
soaa_, m, GetDexPc(/* abort on error */ false));
@@ -3357,7 +3354,7 @@
return VisitMethodResult::kContinueMethod;
}
- VisitMethodResult EndMethod(ArtMethod* m ATTRIBUTE_UNUSED) override {
+ VisitMethodResult EndMethod([[maybe_unused]] ArtMethod* m) override {
lock_objects_.push_back({});
lock_objects_[lock_objects_.size() - 1].swap(frame_lock_objects_);
@@ -3366,8 +3363,7 @@
return VisitMethodResult::kContinueMethod;
}
- void VisitWaitingObject(ObjPtr<mirror::Object> obj, ThreadState state ATTRIBUTE_UNUSED)
- override
+ void VisitWaitingObject(ObjPtr<mirror::Object> obj, [[maybe_unused]] ThreadState state) override
REQUIRES_SHARED(Locks::mutator_lock_) {
wait_jobject_.reset(soaa_.AddLocalReference<jobject>(obj));
}
@@ -3377,9 +3373,8 @@
wait_jobject_.reset(soaa_.AddLocalReference<jobject>(obj));
}
void VisitBlockedOnObject(ObjPtr<mirror::Object> obj,
- ThreadState state ATTRIBUTE_UNUSED,
- uint32_t owner_tid ATTRIBUTE_UNUSED)
- override
+ [[maybe_unused]] ThreadState state,
+ [[maybe_unused]] uint32_t owner_tid) override
REQUIRES_SHARED(Locks::mutator_lock_) {
block_jobject_.reset(soaa_.AddLocalReference<jobject>(obj));
}
@@ -4271,26 +4266,23 @@
void VisitQuickFrameNonPrecise() REQUIRES_SHARED(Locks::mutator_lock_) {
struct UndefinedVRegInfo {
- UndefinedVRegInfo(ArtMethod* method ATTRIBUTE_UNUSED,
- const CodeInfo& code_info ATTRIBUTE_UNUSED,
- const StackMap& map ATTRIBUTE_UNUSED,
+ UndefinedVRegInfo([[maybe_unused]] ArtMethod* method,
+ [[maybe_unused]] const CodeInfo& code_info,
+ [[maybe_unused]] const StackMap& map,
RootVisitor& _visitor)
- : visitor(_visitor) {
- }
+ : visitor(_visitor) {}
ALWAYS_INLINE
void VisitStack(mirror::Object** ref,
- size_t stack_index ATTRIBUTE_UNUSED,
- const StackVisitor* stack_visitor)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] size_t stack_index,
+ const StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
visitor(ref, JavaFrameRootInfo::kImpreciseVreg, stack_visitor);
}
ALWAYS_INLINE
void VisitRegister(mirror::Object** ref,
- size_t register_index ATTRIBUTE_UNUSED,
- const StackVisitor* stack_visitor)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] size_t register_index,
+ const StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
visitor(ref, JavaFrameRootInfo::kImpreciseVreg, stack_visitor);
}
@@ -4541,8 +4533,8 @@
class VerifyRootVisitor : public SingleRootVisitor {
public:
- void VisitRoot(mirror::Object* root, const RootInfo& info ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(Locks::mutator_lock_) {
+ void VisitRoot(mirror::Object* root, [[maybe_unused]] const RootInfo& info) override
+ REQUIRES_SHARED(Locks::mutator_lock_) {
VerifyObject(root);
}
};
diff --git a/runtime/trace.cc b/runtime/trace.cc
index 2e9f998..9045f50 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -349,7 +349,7 @@
the_trace->CompareAndUpdateStackTrace(thread, stack_trace);
}
-static void ClearThreadStackTraceAndClockBase(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
+static void ClearThreadStackTraceAndClockBase(Thread* thread, [[maybe_unused]] void* arg) {
thread->SetTraceClockBase(0);
std::vector<ArtMethod*>* stack_trace = thread->GetStackTraceSample();
thread->SetStackTraceSample(nullptr);
@@ -489,7 +489,7 @@
auto deleter = [](File* file) {
if (file != nullptr) {
file->MarkUnchecked(); // Don't deal with flushing requirements.
- int result ATTRIBUTE_UNUSED = file->Close();
+ [[maybe_unused]] int result = file->Close();
delete file;
}
};
@@ -916,8 +916,8 @@
}
}
-void Trace::DexPcMoved(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
+void Trace::DexPcMoved([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
ArtMethod* method,
uint32_t new_dex_pc) {
// We're not recorded to listen to this kind of event, so complain.
@@ -925,23 +925,22 @@
<< " " << new_dex_pc;
}
-void Trace::FieldRead(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
+void Trace::FieldRead([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
ArtMethod* method,
uint32_t dex_pc,
- ArtField* field ATTRIBUTE_UNUSED)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ [[maybe_unused]] ArtField* field) REQUIRES_SHARED(Locks::mutator_lock_) {
// We're not recorded to listen to this kind of event, so complain.
LOG(ERROR) << "Unexpected field read event in tracing " << ArtMethod::PrettyMethod(method)
<< " " << dex_pc;
}
-void Trace::FieldWritten(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Object> this_object ATTRIBUTE_UNUSED,
+void Trace::FieldWritten([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Object> this_object,
ArtMethod* method,
uint32_t dex_pc,
- ArtField* field ATTRIBUTE_UNUSED,
- const JValue& field_value ATTRIBUTE_UNUSED)
+ [[maybe_unused]] ArtField* field,
+ [[maybe_unused]] const JValue& field_value)
REQUIRES_SHARED(Locks::mutator_lock_) {
// We're not recorded to listen to this kind of event, so complain.
LOG(ERROR) << "Unexpected field write event in tracing " << ArtMethod::PrettyMethod(method)
@@ -957,31 +956,29 @@
void Trace::MethodExited(Thread* thread,
ArtMethod* method,
- instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED,
- JValue& return_value ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] instrumentation::OptionalFrame frame,
+ [[maybe_unused]] JValue& return_value) {
uint32_t thread_clock_diff = 0;
uint64_t timestamp_counter = 0;
ReadClocks(thread, &thread_clock_diff, &timestamp_counter);
LogMethodTraceEvent(thread, method, kTraceMethodExit, thread_clock_diff, timestamp_counter);
}
-void Trace::MethodUnwind(Thread* thread,
- ArtMethod* method,
- uint32_t dex_pc ATTRIBUTE_UNUSED) {
+void Trace::MethodUnwind(Thread* thread, ArtMethod* method, [[maybe_unused]] uint32_t dex_pc) {
uint32_t thread_clock_diff = 0;
uint64_t timestamp_counter = 0;
ReadClocks(thread, &thread_clock_diff, &timestamp_counter);
LogMethodTraceEvent(thread, method, kTraceUnroll, thread_clock_diff, timestamp_counter);
}
-void Trace::ExceptionThrown(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Throwable> exception_object ATTRIBUTE_UNUSED)
+void Trace::ExceptionThrown([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Throwable> exception_object)
REQUIRES_SHARED(Locks::mutator_lock_) {
LOG(ERROR) << "Unexpected exception thrown event in tracing";
}
-void Trace::ExceptionHandled(Thread* thread ATTRIBUTE_UNUSED,
- Handle<mirror::Throwable> exception_object ATTRIBUTE_UNUSED)
+void Trace::ExceptionHandled([[maybe_unused]] Thread* thread,
+ [[maybe_unused]] Handle<mirror::Throwable> exception_object)
REQUIRES_SHARED(Locks::mutator_lock_) {
LOG(ERROR) << "Unexpected exception thrown event in tracing";
}
@@ -992,8 +989,8 @@
LOG(ERROR) << "Unexpected branch event in tracing" << ArtMethod::PrettyMethod(method);
}
-void Trace::WatchedFramePop(Thread* self ATTRIBUTE_UNUSED,
- const ShadowFrame& frame ATTRIBUTE_UNUSED) {
+void Trace::WatchedFramePop([[maybe_unused]] Thread* self,
+ [[maybe_unused]] const ShadowFrame& frame) {
LOG(ERROR) << "Unexpected WatchedFramePop event in tracing";
}
diff --git a/runtime/vdex_file.cc b/runtime/vdex_file.cc
index bdead55..d55876d 100644
--- a/runtime/vdex_file.cc
+++ b/runtime/vdex_file.cc
@@ -57,7 +57,7 @@
return (memcmp(vdex_version_, kVdexVersion, sizeof(kVdexVersion)) == 0);
}
-VdexFile::VdexFileHeader::VdexFileHeader(bool has_dex_section ATTRIBUTE_UNUSED)
+VdexFile::VdexFileHeader::VdexFileHeader([[maybe_unused]] bool has_dex_section)
: number_of_sections_(static_cast<uint32_t>(VdexSection::kNumberOfSections)) {
memcpy(magic_, kVdexMagic, sizeof(kVdexMagic));
memcpy(vdex_version_, kVdexVersion, sizeof(kVdexVersion));
diff --git a/runtime/verifier/reg_type.h b/runtime/verifier/reg_type.h
index 965bbaf..c13784c 100644
--- a/runtime/verifier/reg_type.h
+++ b/runtime/verifier/reg_type.h
@@ -312,7 +312,7 @@
cache_id_(cache_id) {}
template <typename Class>
- void CheckConstructorInvariants(Class* this_ ATTRIBUTE_UNUSED) const
+ void CheckConstructorInvariants([[maybe_unused]] Class* this_) const
REQUIRES_SHARED(Locks::mutator_lock_) {
static_assert(std::is_final<Class>::value, "Class must be final.");
if (kIsDebugBuild) {
diff --git a/runtime/write_barrier-inl.h b/runtime/write_barrier-inl.h
index af8c1be..ee6b336 100644
--- a/runtime/write_barrier-inl.h
+++ b/runtime/write_barrier-inl.h
@@ -28,7 +28,7 @@
template <WriteBarrier::NullCheck kNullCheck>
inline void WriteBarrier::ForFieldWrite(ObjPtr<mirror::Object> dst,
- MemberOffset offset ATTRIBUTE_UNUSED,
+ [[maybe_unused]] MemberOffset offset,
ObjPtr<mirror::Object> new_value) {
if (kNullCheck == kWithNullCheck && new_value == nullptr) {
return;
@@ -38,8 +38,8 @@
}
inline void WriteBarrier::ForArrayWrite(ObjPtr<mirror::Object> dst,
- int start_offset ATTRIBUTE_UNUSED,
- size_t length ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] int start_offset,
+ [[maybe_unused]] size_t length) {
GetCardTable()->MarkCard(dst.Ptr());
}
diff --git a/runtime/write_barrier.h b/runtime/write_barrier.h
index 112154e..8080b0d 100644
--- a/runtime/write_barrier.h
+++ b/runtime/write_barrier.h
@@ -38,15 +38,15 @@
// safe-point. The call is not needed if null is stored in the field.
template <NullCheck kNullCheck = kWithNullCheck>
ALWAYS_INLINE static void ForFieldWrite(ObjPtr<mirror::Object> dst,
- MemberOffset offset ATTRIBUTE_UNUSED,
- ObjPtr<mirror::Object> new_value ATTRIBUTE_UNUSED)
+ [[maybe_unused]] MemberOffset offset,
+ [[maybe_unused]] ObjPtr<mirror::Object> new_value)
REQUIRES_SHARED(Locks::mutator_lock_);
// Must be called if a reference field of an ObjectArray in the heap changes, and before any GC
// safe-point. The call is not needed if null is stored in the field.
ALWAYS_INLINE static void ForArrayWrite(ObjPtr<mirror::Object> dst,
- int start_offset ATTRIBUTE_UNUSED,
- size_t length ATTRIBUTE_UNUSED)
+ [[maybe_unused]] int start_offset,
+ [[maybe_unused]] size_t length)
REQUIRES_SHARED(Locks::mutator_lock_);
// Write barrier for every reference field in an object.
diff --git a/sigchainlib/sigchain_fake.cc b/sigchainlib/sigchain_fake.cc
index 2386154..0e62eeb 100644
--- a/sigchainlib/sigchain_fake.cc
+++ b/sigchainlib/sigchain_fake.cc
@@ -20,8 +20,6 @@
#include "log.h"
#include "sigchain.h"
-#define ATTRIBUTE_UNUSED __attribute__((__unused__))
-
// We cannot annotate the declarations, as they are not no-return in the non-fake version.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
@@ -29,24 +27,24 @@
namespace art {
-extern "C" void EnsureFrontOfChain(int signal ATTRIBUTE_UNUSED) {
+extern "C" void EnsureFrontOfChain([[maybe_unused]] int signal) {
log("EnsureFrontOfChain is not exported by the main executable.");
abort();
}
-extern "C" void AddSpecialSignalHandlerFn(int signal ATTRIBUTE_UNUSED,
- SigchainAction* sa ATTRIBUTE_UNUSED) {
+extern "C" void AddSpecialSignalHandlerFn([[maybe_unused]] int signal,
+ [[maybe_unused]] SigchainAction* sa) {
log("SetSpecialSignalHandlerFn is not exported by the main executable.");
abort();
}
-extern "C" void RemoveSpecialSignalHandlerFn(int signal ATTRIBUTE_UNUSED,
- bool (*fn)(int, siginfo_t*, void*) ATTRIBUTE_UNUSED) {
+extern "C" void RemoveSpecialSignalHandlerFn([[maybe_unused]] int signal,
+ [[maybe_unused]] bool (*fn)(int, siginfo_t*, void*)) {
log("SetSpecialSignalHandlerFn is not exported by the main executable.");
abort();
}
-extern "C" void SkipAddSignalHandler(bool value ATTRIBUTE_UNUSED) {
+extern "C" void SkipAddSignalHandler([[maybe_unused]] bool value) {
log("SkipAddSignalHandler is not exported by the main executable.");
abort();
}
diff --git a/sigchainlib/sigchain_test.cc b/sigchainlib/sigchain_test.cc
index 5e9c7fe..d8ff4d5 100644
--- a/sigchainlib/sigchain_test.cc
+++ b/sigchainlib/sigchain_test.cc
@@ -267,8 +267,8 @@
ASSERT_EQ(0, sigaction(SIGSEGV, &action, nullptr));
auto* tagged_null = reinterpret_cast<int*>(0x2bULL << 56);
- EXPECT_EXIT({ volatile int load __attribute__((unused)) = *tagged_null; },
- testing::ExitedWithCode(0), "");
+ EXPECT_EXIT(
+ { [[maybe_unused]] volatile int load = *tagged_null; }, testing::ExitedWithCode(0), "");
// Our sigaction implementation always implements the "clear unknown bits"
// semantics for oldact.sa_flags regardless of kernel version so we rely on it
@@ -277,8 +277,9 @@
ASSERT_EQ(0, sigaction(SIGSEGV, &action, nullptr));
ASSERT_EQ(0, sigaction(SIGSEGV, nullptr, &action));
if (action.sa_flags & SA_EXPOSE_TAGBITS) {
- EXPECT_EXIT({ volatile int load __attribute__((unused)) = *tagged_null; },
- testing::ExitedWithCode(0x2b), "");
+ EXPECT_EXIT({ [[maybe_unused]] volatile int load = *tagged_null; },
+ testing::ExitedWithCode(0x2b),
+ "");
}
}
#endif
diff --git a/test/004-SignalTest/signaltest.cc b/test/004-SignalTest/signaltest.cc
index 18b4502..916fb57 100644
--- a/test/004-SignalTest/signaltest.cc
+++ b/test/004-SignalTest/signaltest.cc
@@ -53,15 +53,15 @@
#define BLOCKED_SIGNAL SIGUSR1
#define UNBLOCKED_SIGNAL SIGUSR2
-static void blocked_signal(int sig ATTRIBUTE_UNUSED) {
+static void blocked_signal([[maybe_unused]] int sig) {
printf("blocked signal received\n");
}
-static void unblocked_signal(int sig ATTRIBUTE_UNUSED) {
+static void unblocked_signal([[maybe_unused]] int sig) {
printf("unblocked signal received\n");
}
-static void signalhandler(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED,
+static void signalhandler([[maybe_unused]] int sig, [[maybe_unused]] siginfo_t* info,
void* context) {
printf("signal caught\n");
++signal_count;
diff --git a/test/044-proxy/native_proxy.cc b/test/044-proxy/native_proxy.cc
index f3178f9..e86c5a8 100644
--- a/test/044-proxy/native_proxy.cc
+++ b/test/044-proxy/native_proxy.cc
@@ -21,7 +21,7 @@
namespace art {
extern "C" JNIEXPORT void JNICALL Java_NativeProxy_nativeCall(
- JNIEnv* env, jclass clazz ATTRIBUTE_UNUSED, jobject inf_ref) {
+ JNIEnv* env, [[maybe_unused]] jclass clazz, jobject inf_ref) {
jclass native_inf_class = env->FindClass("NativeInterface");
CHECK(native_inf_class != nullptr);
jmethodID mid = env->GetMethodID(native_inf_class, "callback", "()V");
diff --git a/test/051-thread/thread_test.cc b/test/051-thread/thread_test.cc
index 33841eb..6c70923 100644
--- a/test/051-thread/thread_test.cc
+++ b/test/051-thread/thread_test.cc
@@ -21,13 +21,12 @@
namespace art {
extern "C" JNIEXPORT jint JNICALL Java_Main_getNativePriority(JNIEnv* env,
- jclass clazz ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass clazz) {
return Thread::ForEnv(env)->GetNativePriority();
}
extern "C" JNIEXPORT jboolean JNICALL Java_Main_supportsThreadPriorities(
- JNIEnv* env ATTRIBUTE_UNUSED,
- jclass clazz ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass clazz) {
#if defined(ART_TARGET_ANDROID)
return JNI_TRUE;
#else
diff --git a/test/115-native-bridge/nativebridge.cc b/test/115-native-bridge/nativebridge.cc
index 4388747..65d7fcd 100644
--- a/test/115-native-bridge/nativebridge.cc
+++ b/test/115-native-bridge/nativebridge.cc
@@ -185,9 +185,9 @@
// This code is adapted from 004-SignalTest and causes a segfault.
char *go_away_compiler = nullptr;
-[[ noreturn ]] static void test_sigaction_handler(int sig ATTRIBUTE_UNUSED,
- siginfo_t* info ATTRIBUTE_UNUSED,
- void* context ATTRIBUTE_UNUSED) {
+[[ noreturn ]] static void test_sigaction_handler([[maybe_unused]] int sig,
+ [[maybe_unused]] siginfo_t* info,
+ [[maybe_unused]] void* context) {
printf("Should not reach the test sigaction handler.");
abort();
}
@@ -423,7 +423,7 @@
// NativeBridgeCallbacks implementations
extern "C" bool native_bridge_initialize(const android::NativeBridgeRuntimeCallbacks* art_cbs,
const char* app_code_cache_dir,
- const char* isa ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] const char* isa) {
struct stat st;
if (app_code_cache_dir != nullptr) {
if (stat(app_code_cache_dir, &st) == 0) {
@@ -471,7 +471,7 @@
}
extern "C" void* native_bridge_getTrampoline(void* handle, const char* name, const char* shorty,
- uint32_t len ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] uint32_t len) {
printf("Getting trampoline for %s with shorty %s.\n", name, shorty);
// The name here is actually the JNI name, so we can directly do the lookup.
@@ -532,7 +532,7 @@
// v2 parts.
-extern "C" bool native_bridge_isCompatibleWith(uint32_t bridge_version ATTRIBUTE_UNUSED) {
+extern "C" bool native_bridge_isCompatibleWith([[maybe_unused]] uint32_t bridge_version) {
return true;
}
@@ -557,7 +557,7 @@
#endif
#endif
-static bool StandardSignalHandler(int sig, siginfo_t* info ATTRIBUTE_UNUSED, void* context) {
+static bool StandardSignalHandler(int sig, [[maybe_unused]] siginfo_t* info, void* context) {
if (sig == SIGSEGV) {
#if defined(__arm__)
ucontext_t* uc = reinterpret_cast<ucontext_t*>(context);
@@ -610,7 +610,7 @@
return nullptr;
}
-extern "C" int native_bridge_unloadLibrary(void* handle ATTRIBUTE_UNUSED) {
+extern "C" int native_bridge_unloadLibrary([[maybe_unused]] void* handle) {
printf("dlclose() in native bridge.\n");
return 0;
}
@@ -620,40 +620,43 @@
return "";
}
-extern "C" bool native_bridge_isPathSupported(const char* library_path ATTRIBUTE_UNUSED) {
+extern "C" bool native_bridge_isPathSupported([[maybe_unused]] const char* library_path) {
printf("Checking for path support in native bridge.\n");
return false;
}
-extern "C" bool native_bridge_initAnonymousNamespace(const char* public_ns_sonames ATTRIBUTE_UNUSED,
- const char* anon_ns_library_path ATTRIBUTE_UNUSED) {
+extern "C" bool native_bridge_initAnonymousNamespace(
+ [[maybe_unused]] const char* public_ns_sonames,
+ [[maybe_unused]] const char* anon_ns_library_path) {
printf("Initializing anonymous namespace in native bridge.\n");
return false;
}
extern "C" android::native_bridge_namespace_t*
-native_bridge_createNamespace(const char* name ATTRIBUTE_UNUSED,
- const char* ld_library_path ATTRIBUTE_UNUSED,
- const char* default_library_path ATTRIBUTE_UNUSED,
- uint64_t type ATTRIBUTE_UNUSED,
- const char* permitted_when_isolated_path ATTRIBUTE_UNUSED,
- android::native_bridge_namespace_t* parent_ns ATTRIBUTE_UNUSED) {
+native_bridge_createNamespace([[maybe_unused]] const char* name,
+ [[maybe_unused]] const char* ld_library_path,
+ [[maybe_unused]] const char* default_library_path,
+ [[maybe_unused]] uint64_t type,
+ [[maybe_unused]] const char* permitted_when_isolated_path,
+ [[maybe_unused]] android::native_bridge_namespace_t* parent_ns) {
printf("Creating namespace in native bridge.\n");
return nullptr;
}
-extern "C" bool native_bridge_linkNamespaces(android::native_bridge_namespace_t* from ATTRIBUTE_UNUSED,
- android::native_bridge_namespace_t* to ATTRIBUTE_UNUSED,
- const char* shared_libs_sonames ATTRIBUTE_UNUSED) {
+extern "C" bool native_bridge_linkNamespaces(
+ [[maybe_unused]] android::native_bridge_namespace_t* from,
+ [[maybe_unused]] android::native_bridge_namespace_t* to,
+ [[maybe_unused]] const char* shared_libs_sonames) {
printf("Linking namespaces in native bridge.\n");
return false;
}
-extern "C" void* native_bridge_loadLibraryExt(const char* libpath ATTRIBUTE_UNUSED,
- int flag ATTRIBUTE_UNUSED,
- android::native_bridge_namespace_t* ns ATTRIBUTE_UNUSED) {
- printf("Loading library with Extension in native bridge.\n");
- return nullptr;
+extern "C" void* native_bridge_loadLibraryExt(
+ [[maybe_unused]] const char* libpath,
+ [[maybe_unused]] int flag,
+ [[maybe_unused]] android::native_bridge_namespace_t* ns) {
+ printf("Loading library with Extension in native bridge.\n");
+ return nullptr;
}
// "NativeBridgeItf" is effectively an API (it is the name of the symbol that will be loaded
diff --git a/test/137-cfi/cfi.cc b/test/137-cfi/cfi.cc
index cd5b4cf..29a6ff5 100644
--- a/test/137-cfi/cfi.cc
+++ b/test/137-cfi/cfi.cc
@@ -212,7 +212,7 @@
static constexpr int kMaxTotalSleepTimeMicroseconds = 10000000; // 10 seconds
// Wait for a sigstop. This code is copied from libbacktrace.
-int wait_for_sigstop(pid_t tid, int* total_sleep_time_usec, bool* detach_failed ATTRIBUTE_UNUSED) {
+int wait_for_sigstop(pid_t tid, int* total_sleep_time_usec, [[maybe_unused]] bool* detach_failed) {
for (;;) {
int status;
pid_t n = TEMP_FAILURE_RETRY(waitpid(tid, &status, __WALL | WNOHANG | WUNTRACED));
diff --git a/test/1919-vminit-thread-start-timing/vminit.cc b/test/1919-vminit-thread-start-timing/vminit.cc
index ddf6649..2318961 100644
--- a/test/1919-vminit-thread-start-timing/vminit.cc
+++ b/test/1919-vminit-thread-start-timing/vminit.cc
@@ -65,7 +65,7 @@
static void JNICALL Test1919AgentThread(jvmtiEnv* jvmti,
JNIEnv* env,
- void* arg ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* arg) {
EventList* list = nullptr;
CheckJvmtiError(jvmti, jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&list)));
CheckJvmtiError(jvmti, jvmti->RawMonitorEnter(list->events_mutex));
@@ -140,8 +140,8 @@
}
jint OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0) != 0) {
printf("Unable to get jvmti env!\n");
return 1;
diff --git a/test/1922-owned-monitors-info/owned_monitors.cc b/test/1922-owned-monitors-info/owned_monitors.cc
index 66a8368..e95c914 100644
--- a/test/1922-owned-monitors-info/owned_monitors.cc
+++ b/test/1922-owned-monitors-info/owned_monitors.cc
@@ -68,7 +68,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test1922_00024Target_lockNative(
- JNIEnv* env, jobject thiz ATTRIBUTE_UNUSED, jobject mon, jobject next) {
+ JNIEnv* env, [[maybe_unused]] jobject thiz, jobject mon, jobject next) {
if (doMonitorEnter(env, mon)) {
return;
}
diff --git a/test/1936-thread-end-events/method_trace.cc b/test/1936-thread-end-events/method_trace.cc
index 019b6a9..edfff90 100644
--- a/test/1936-thread-end-events/method_trace.cc
+++ b/test/1936-thread-end-events/method_trace.cc
@@ -52,8 +52,8 @@
return env->CallStaticObjectMethod(klass, targetMethod);
}
-extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
return;
}
diff --git a/test/1945-proxy-method-arguments/get_args.cc b/test/1945-proxy-method-arguments/get_args.cc
index 859e229..5191761 100644
--- a/test/1945-proxy-method-arguments/get_args.cc
+++ b/test/1945-proxy-method-arguments/get_args.cc
@@ -104,7 +104,7 @@
}
extern "C" JNIEXPORT jobject JNICALL Java_TestInvocationHandler_getArgument(
- JNIEnv* env ATTRIBUTE_UNUSED, jobject thiz ATTRIBUTE_UNUSED, int arg_pos, int frame_depth) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject thiz, int arg_pos, int frame_depth) {
return GetProxyReferenceArgument(arg_pos, frame_depth);
}
diff --git a/test/1950-unprepared-transform/unprepared_transform.cc b/test/1950-unprepared-transform/unprepared_transform.cc
index 620ede8..93c4b3e 100644
--- a/test/1950-unprepared-transform/unprepared_transform.cc
+++ b/test/1950-unprepared-transform/unprepared_transform.cc
@@ -37,15 +37,15 @@
jclass kMainClass = nullptr;
jmethodID kPrepareFunc = nullptr;
-extern "C" JNIEXPORT void ClassLoadCallback(jvmtiEnv* jvmti ATTRIBUTE_UNUSED,
- JNIEnv* env,
- jthread thr ATTRIBUTE_UNUSED,
- jclass klass) {
+extern "C" JNIEXPORT void ClassLoadCallback([[maybe_unused]] jvmtiEnv* jvmti,
+ JNIEnv* env,
+ [[maybe_unused]] jthread thr,
+ jclass klass) {
env->CallStaticVoidMethod(kMainClass, kPrepareFunc, klass);
}
extern "C" JNIEXPORT void JNICALL Java_Main_clearClassLoadHook(
- JNIEnv* env, jclass main ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass main, jthread thr) {
JvmtiErrorToException(env,
jvmti_env,
jvmti_env->SetEventNotificationMode(JVMTI_DISABLE,
diff --git a/test/1953-pop-frame/pop_frame.cc b/test/1953-pop-frame/pop_frame.cc
index 86345d6..9b3af96 100644
--- a/test/1953-pop-frame/pop_frame.cc
+++ b/test/1953-pop-frame/pop_frame.cc
@@ -44,7 +44,7 @@
extern "C" JNIEXPORT
void JNICALL Java_art_Test1953_popFrame(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jthread thr) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->PopFrame(thr));
}
diff --git a/test/1957-error-ext/lasterror.cc b/test/1957-error-ext/lasterror.cc
index 5aa3fbe..41c5f13 100644
--- a/test/1957-error-ext/lasterror.cc
+++ b/test/1957-error-ext/lasterror.cc
@@ -84,7 +84,7 @@
}
extern "C" JNIEXPORT
-jstring JNICALL Java_art_Test1957_getLastError(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+jstring JNICALL Java_art_Test1957_getLastError(JNIEnv* env, [[maybe_unused]] jclass klass) {
GetLastError get_last_error = reinterpret_cast<GetLastError>(
FindExtensionMethod(env, "com.android.art.misc.get_last_error_message"));
if (get_last_error == nullptr) {
@@ -99,7 +99,7 @@
}
extern "C" JNIEXPORT
-void JNICALL Java_art_Test1957_clearLastError(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+void JNICALL Java_art_Test1957_clearLastError(JNIEnv* env, [[maybe_unused]] jclass klass) {
ClearLastError clear_last_error = reinterpret_cast<ClearLastError>(
FindExtensionMethod(env, "com.android.art.misc.clear_last_error_message"));
if (clear_last_error == nullptr) {
diff --git a/test/1959-redefine-object-instrument/fake_redef_object.cc b/test/1959-redefine-object-instrument/fake_redef_object.cc
index b1201ab..a5b6a7d 100644
--- a/test/1959-redefine-object-instrument/fake_redef_object.cc
+++ b/test/1959-redefine-object-instrument/fake_redef_object.cc
@@ -39,10 +39,10 @@
// Just pull it out of the dex file but don't bother changing anything.
static void JNICALL RedefineObjectHook(jvmtiEnv *jvmti_env,
JNIEnv* env,
- jclass class_being_redefined ATTRIBUTE_UNUSED,
- jobject loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass class_being_redefined,
+ [[maybe_unused]] jobject loader,
const char* name,
- jobject protection_domain ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject protection_domain,
jint class_data_len,
const unsigned char* class_data,
jint* new_class_data_len,
@@ -93,7 +93,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_Main_forceRedefine(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jclass obj_class,
jthread thr) {
if (IsJVM()) {
diff --git a/test/1962-multi-thread-events/multi_thread_events.cc b/test/1962-multi-thread-events/multi_thread_events.cc
index aeb15b0..f27640a 100644
--- a/test/1962-multi-thread-events/multi_thread_events.cc
+++ b/test/1962-multi-thread-events/multi_thread_events.cc
@@ -38,8 +38,8 @@
JNIEnv* env,
jthread thread,
jmethodID method,
- jboolean was_exception ATTRIBUTE_UNUSED,
- jvalue val ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jboolean was_exception,
+ [[maybe_unused]] jvalue val) {
BreakpointData* data = nullptr;
if (JvmtiErrorToException(
env, jvmti, jvmti->GetThreadLocalStorage(thread, reinterpret_cast<void**>(&data)))) {
@@ -56,7 +56,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test1962_setupTest(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass klass) {
jvmtiCapabilities caps{
.can_generate_method_exit_events = 1,
};
@@ -70,7 +70,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test1962_setupThread(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr, jobject events, jobject target) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr, jobject events, jobject target) {
BreakpointData* data = nullptr;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->Allocate(sizeof(*data), reinterpret_cast<uint8_t**>(&data)))) {
diff --git a/test/1972-jni-id-swap-indices/jni_id.cc b/test/1972-jni-id-swap-indices/jni_id.cc
index 7de7131..f3c2a62 100644
--- a/test/1972-jni-id-swap-indices/jni_id.cc
+++ b/test/1972-jni-id-swap-indices/jni_id.cc
@@ -27,7 +27,7 @@
namespace art {
extern "C" JNIEXPORT jlong JNICALL Java_Main_GetMethodId(JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
bool is_static,
jclass target,
jstring name,
@@ -42,18 +42,18 @@
return res;
}
-extern "C" JNIEXPORT jobject JNICALL Java_Main_GetJniType(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jobject JNICALL Java_Main_GetJniType(JNIEnv* env, [[maybe_unused]] jclass k) {
std::ostringstream oss;
oss << Runtime::Current()->GetJniIdType();
return env->NewStringUTF(oss.str().c_str());
}
-extern "C" JNIEXPORT void JNICALL Java_Main_SetToPointerIds(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass k ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Java_Main_SetToPointerIds([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass k) {
Runtime::Current()->SetJniIdType(JniIdType::kPointer);
}
-extern "C" JNIEXPORT void JNICALL Java_Main_SetToIndexIds(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass k ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Java_Main_SetToIndexIds([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass k) {
Runtime::Current()->SetJniIdType(JniIdType::kIndices);
}
diff --git a/test/1974-resize-array/resize_array.cc b/test/1974-resize-array/resize_array.cc
index 60037b8..1746821 100644
--- a/test/1974-resize-array/resize_array.cc
+++ b/test/1974-resize-array/resize_array.cc
@@ -110,7 +110,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test1974_ResizeArray(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject ref_gen,
jint new_size) {
ChangeArraySize change_array_size = reinterpret_cast<ChangeArraySize>(
@@ -125,24 +125,24 @@
}
extern "C" JNIEXPORT jobject JNICALL Java_art_Test1974_ReadJniRef(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jlong r) {
return env->NewLocalRef(reinterpret_cast<jobject>(static_cast<intptr_t>(r)));
}
extern "C" JNIEXPORT jlong JNICALL
-Java_art_Test1974_GetWeakGlobalJniRef(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject r) {
+Java_art_Test1974_GetWeakGlobalJniRef(JNIEnv* env, [[maybe_unused]] jclass klass, jobject r) {
return static_cast<jlong>(reinterpret_cast<intptr_t>(env->NewWeakGlobalRef(r)));
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test1974_GetGlobalJniRef(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject r) {
return static_cast<jlong>(reinterpret_cast<intptr_t>(env->NewGlobalRef(r)));
}
extern "C" JNIEXPORT jobjectArray JNICALL
-Java_art_Test1974_GetObjectsWithTag(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) {
+Java_art_Test1974_GetObjectsWithTag(JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) {
jsize cnt = 0;
jobject* res = nullptr;
if (JvmtiErrorToException(
@@ -161,7 +161,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test1974_runNativeTest(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobjectArray arr,
jobject resize,
jobject print,
@@ -181,7 +181,7 @@
};
extern "C" JNIEXPORT void JNICALL Java_art_Test1974_StartCollectFrees(JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass k) {
jvmtiEventCallbacks cb{
.ObjectFree =
[](jvmtiEnv* jvmti, jlong tag) {
@@ -208,14 +208,14 @@
}
extern "C" JNIEXPORT void JNICALL
-Java_art_Test1974_StartAssignObsoleteIncrementedId(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) {
+Java_art_Test1974_StartAssignObsoleteIncrementedId(JNIEnv* env, [[maybe_unused]] jclass k) {
jint id = FindExtensionEvent(env, "com.android.art.heap.obsolete_object_created");
if (env->ExceptionCheck()) {
LOG(INFO) << "Could not find extension event!";
return;
}
using ObsoleteEvent = void (*)(jvmtiEnv * env, jlong * obsolete, jlong * non_obsolete);
- ObsoleteEvent oe = [](jvmtiEnv* env ATTRIBUTE_UNUSED, jlong* obsolete, jlong* non_obsolete) {
+ ObsoleteEvent oe = []([[maybe_unused]] jvmtiEnv* env, jlong* obsolete, jlong* non_obsolete) {
*non_obsolete = *obsolete;
*obsolete = *obsolete + 1;
};
@@ -226,7 +226,7 @@
}
extern "C" JNIEXPORT void JNICALL
-Java_art_Test1974_EndAssignObsoleteIncrementedId(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) {
+Java_art_Test1974_EndAssignObsoleteIncrementedId(JNIEnv* env, [[maybe_unused]] jclass k) {
jint id = FindExtensionEvent(env, "com.android.art.heap.obsolete_object_created");
if (env->ExceptionCheck()) {
LOG(INFO) << "Could not find extension event!";
@@ -236,7 +236,7 @@
}
extern "C" JNIEXPORT jlongArray JNICALL
-Java_art_Test1974_CollectFreedTags(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) {
+Java_art_Test1974_CollectFreedTags(JNIEnv* env, [[maybe_unused]] jclass k) {
if (JvmtiErrorToException(
env,
jvmti_env,
diff --git a/test/2005-pause-all-redefine-multithreaded/pause-all.cc b/test/2005-pause-all-redefine-multithreaded/pause-all.cc
index 37d6c4d..be0428d 100644
--- a/test/2005-pause-all-redefine-multithreaded/pause-all.cc
+++ b/test/2005-pause-all-redefine-multithreaded/pause-all.cc
@@ -35,7 +35,7 @@
extern "C" JNIEXPORT void JNICALL
Java_art_Test2005_UpdateFieldValuesAndResumeThreads(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobjectArray threads_arr,
jclass redefined_class,
jobjectArray new_fields,
@@ -54,10 +54,10 @@
CHECK_EQ(jvmti_env->IterateOverInstancesOfClass(
redefined_class,
JVMTI_HEAP_OBJECT_EITHER,
- [](jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
+ []([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
jlong* tag_ptr,
- void* user_data ATTRIBUTE_UNUSED) -> jvmtiIterationControl {
+ [[maybe_unused]] void* user_data) -> jvmtiIterationControl {
*tag_ptr = kRedefinedObjectTag;
return JVMTI_ITERATION_CONTINUE;
},
@@ -87,7 +87,7 @@
}
extern "C" JNIEXPORT jobject JNICALL
-Java_Main_fastNativeSleepAndReturnInteger42(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+Java_Main_fastNativeSleepAndReturnInteger42(JNIEnv* env, [[maybe_unused]] jclass klass) {
jclass integer_class = env->FindClass("java/lang/Integer");
CHECK(integer_class != nullptr);
jmethodID integer_value_of =
diff --git a/test/2009-structural-local-ref/local-ref.cc b/test/2009-structural-local-ref/local-ref.cc
index 9f6ef0b..5bd3287 100644
--- a/test/2009-structural-local-ref/local-ref.cc
+++ b/test/2009-structural-local-ref/local-ref.cc
@@ -32,7 +32,7 @@
namespace Test2009StructuralLocalRef {
extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalCallStatic(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) {
jclass obj_klass = env->GetObjectClass(obj);
jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V");
env->CallVoidMethod(thnk, run_meth);
@@ -46,7 +46,7 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalCallVirtual(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) {
jclass obj_klass = env->GetObjectClass(obj);
jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V");
env->CallVoidMethod(thnk, run_meth);
@@ -58,7 +58,7 @@
}
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalGetIField(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) {
jclass obj_klass = env->GetObjectClass(obj);
jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V");
env->CallVoidMethod(thnk, run_meth);
@@ -71,7 +71,7 @@
}
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalGetSField(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) {
jclass obj_klass = env->GetObjectClass(obj);
jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V");
env->CallVoidMethod(thnk, run_meth);
diff --git a/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc b/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc
index 4b3dac9..cd740aa 100644
--- a/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc
+++ b/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc
@@ -36,7 +36,7 @@
namespace Test2012SetJniIdUsed {
extern "C" JNIEXPORT void JNICALL Java_Main_SetPointerIdsUsed(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass target) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jclass target) {
ScopedObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::Class> h(hs.NewHandle(soa.Decode<mirror::Class>(target)));
diff --git a/test/2033-shutdown-mechanics/native_shutdown.cc b/test/2033-shutdown-mechanics/native_shutdown.cc
index 2b7546a..9cfc989 100644
--- a/test/2033-shutdown-mechanics/native_shutdown.cc
+++ b/test/2033-shutdown-mechanics/native_shutdown.cc
@@ -34,7 +34,7 @@
extern "C" [[noreturn]] JNIEXPORT void JNICALL Java_Main_monitorShutdown(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
bool found_shutdown = false;
bool found_runtime_deleted = false;
JNIEnvExt* const extEnv = down_cast<JNIEnvExt*>(env);
diff --git a/test/2035-structural-native-method/structural-native.cc b/test/2035-structural-native-method/structural-native.cc
index bf51c8b..a47e91c 100644
--- a/test/2035-structural-native-method/structural-native.cc
+++ b/test/2035-structural-native-method/structural-native.cc
@@ -31,12 +31,12 @@
namespace art {
namespace Test2035StructuralNativeMethod {
-jlong JNICALL TransformNativeMethod(JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) {
+jlong JNICALL TransformNativeMethod([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) {
return 42;
}
extern "C" JNIEXPORT void JNICALL Java_art_Test2035_LinkClassMethods(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass target) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jclass target) {
JNINativeMethod meth{"getValue", "()J", reinterpret_cast<void*>(TransformNativeMethod)};
env->RegisterNatives(target, &meth, 1);
}
diff --git a/test/2040-huge-native-alloc/huge_native_buf.cc b/test/2040-huge-native-alloc/huge_native_buf.cc
index 20f629a..71675b2 100644
--- a/test/2040-huge-native-alloc/huge_native_buf.cc
+++ b/test/2040-huge-native-alloc/huge_native_buf.cc
@@ -26,18 +26,18 @@
static constexpr size_t HUGE_SIZE = 10'000'000;
extern "C" JNIEXPORT jobject JNICALL Java_Main_getHugeNativeBuffer(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
char* buffer = new char[HUGE_SIZE];
return env->NewDirectByteBuffer(buffer, HUGE_SIZE);
}
extern "C" JNIEXPORT void JNICALL Java_Main_deleteHugeNativeBuffer(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject jbuffer) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject jbuffer) {
delete [] static_cast<char*>(env->GetDirectBufferAddress(jbuffer));
}
extern "C" JNIEXPORT jint JNICALL Java_Main_getGcNum(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) {
return Runtime::Current()->GetHeap()->GetCurrentGcNum();
}
diff --git a/test/2243-single-step-default/single_step_helper.cc b/test/2243-single-step-default/single_step_helper.cc
index 432e982..3b2d0bd 100644
--- a/test/2243-single-step-default/single_step_helper.cc
+++ b/test/2243-single-step-default/single_step_helper.cc
@@ -29,7 +29,7 @@
JNIEnv* env,
jthread thr,
jmethodID method,
- jlocation location ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jlocation location) {
// We haven't reached the default method yet. Continue single stepping
if (method != interface_default_method) {
return;
@@ -99,14 +99,14 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test2243_enableSingleStep(JNIEnv* env,
- jclass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass cl,
jthread thr) {
jvmtiError err = jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_SINGLE_STEP, thr);
JvmtiErrorToException(env, jvmti_env, err);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test2243_setSingleStepUntil(JNIEnv* env,
- jclass cl ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass cl,
jobject method) {
interface_default_method = env->FromReflectedMethod(method);
}
diff --git a/test/2262-miranda-methods/jni_invoke.cc b/test/2262-miranda-methods/jni_invoke.cc
index da55f8b..8bef787 100644
--- a/test/2262-miranda-methods/jni_invoke.cc
+++ b/test/2262-miranda-methods/jni_invoke.cc
@@ -22,7 +22,7 @@
namespace art {
extern "C" JNIEXPORT void JNICALL
-Java_Main_CallNonvirtual(JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jobject o, jclass c, jmethodID m) {
+Java_Main_CallNonvirtual(JNIEnv* env, [[maybe_unused]] jclass k, jobject o, jclass c, jmethodID m) {
env->CallNonvirtualVoidMethod(o, c, m);
}
diff --git a/test/305-other-fault-handler/fault_handler.cc b/test/305-other-fault-handler/fault_handler.cc
index db3f1f4..240827b 100644
--- a/test/305-other-fault-handler/fault_handler.cc
+++ b/test/305-other-fault-handler/fault_handler.cc
@@ -52,7 +52,7 @@
manager_->RemoveHandler(this);
}
- bool Action(int sig, siginfo_t* siginfo, void* context ATTRIBUTE_UNUSED) override {
+ bool Action(int sig, siginfo_t* siginfo, [[maybe_unused]] void* context) override {
CHECK_EQ(sig, SIGSEGV);
CHECK_EQ(reinterpret_cast<uint32_t*>(siginfo->si_addr),
GetTargetPointer()) << "Segfault on unexpected address!";
diff --git a/test/457-regs/regs_jni.cc b/test/457-regs/regs_jni.cc
index 80abb3b..76351ec 100644
--- a/test/457-regs/regs_jni.cc
+++ b/test/457-regs/regs_jni.cc
@@ -127,7 +127,7 @@
};
extern "C" JNIEXPORT void JNICALL Java_PhiLiveness_regsNativeCall(
- JNIEnv*, jclass value ATTRIBUTE_UNUSED) {
+ JNIEnv*, [[maybe_unused]] jclass value) {
ScopedObjectAccess soa(Thread::Current());
std::unique_ptr<Context> context(Context::Create());
TestVisitor visitor(soa.Self(), context.get());
@@ -136,7 +136,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_PhiLiveness_regsNativeCallWithParameters(
- JNIEnv*, jclass value ATTRIBUTE_UNUSED, jobject main, jint int_value, jfloat float_value) {
+ JNIEnv*, [[maybe_unused]] jclass value, jobject main, jint int_value, jfloat float_value) {
ScopedObjectAccess soa(Thread::Current());
std::unique_ptr<Context> context(Context::Create());
CHECK(soa.Decode<mirror::Object>(main) == nullptr);
diff --git a/test/597-deopt-new-string/deopt.cc b/test/597-deopt-new-string/deopt.cc
index 06dbca6..b882815 100644
--- a/test/597-deopt-new-string/deopt.cc
+++ b/test/597-deopt-new-string/deopt.cc
@@ -28,7 +28,7 @@
extern "C" JNIEXPORT void JNICALL Java_Main_deoptimizeAll(
JNIEnv* env,
- jclass cls ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass cls) {
ScopedObjectAccess soa(env);
ScopedThreadSuspension sts(Thread::Current(), ThreadState::kWaitingForDeoptimization);
gc::ScopedGCCriticalSection gcs(Thread::Current(),
@@ -41,7 +41,7 @@
extern "C" JNIEXPORT void JNICALL Java_Main_undeoptimizeAll(
JNIEnv* env,
- jclass cls ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass cls) {
ScopedObjectAccess soa(env);
ScopedThreadSuspension sts(Thread::Current(), ThreadState::kWaitingForDeoptimization);
gc::ScopedGCCriticalSection gcs(Thread::Current(),
diff --git a/test/720-thread-priority/thread_priority.cc b/test/720-thread-priority/thread_priority.cc
index db4a2b2..519a0a1 100644
--- a/test/720-thread-priority/thread_priority.cc
+++ b/test/720-thread-priority/thread_priority.cc
@@ -22,7 +22,7 @@
#include "jni.h"
extern "C" JNIEXPORT jint JNICALL Java_Main_getThreadPlatformPriority(
- JNIEnv* env ATTRIBUTE_UNUSED,
- jclass clazz ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass clazz) {
return getpriority(PRIO_PROCESS, art::GetTid());
}
diff --git a/test/900-hello-plugin/load_unload.cc b/test/900-hello-plugin/load_unload.cc
index 7121d10..1d83d09 100644
--- a/test/900-hello-plugin/load_unload.cc
+++ b/test/900-hello-plugin/load_unload.cc
@@ -30,7 +30,7 @@
constexpr uintptr_t ENV_VALUE = 900;
// Allow this library to be used as a plugin too so we can test the stack.
-static jint GetEnvHandler(JavaVMExt* vm ATTRIBUTE_UNUSED, void** new_env, jint version) {
+static jint GetEnvHandler([[maybe_unused]] JavaVMExt* vm, void** new_env, jint version) {
printf("%s called in test 900\n", __func__);
if (version != TEST_900_ENV_VERSION_NUMBER) {
return JNI_EVERSION;
@@ -53,7 +53,7 @@
extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
printf("Agent_OnLoad called with options \"%s\"\n", options);
if (strcmp("test_900_round_2", options) == 0) {
return 0;
@@ -67,7 +67,7 @@
return 0;
}
-extern "C" JNIEXPORT void JNICALL Agent_OnUnload(JavaVM* vm ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Agent_OnUnload([[maybe_unused]] JavaVM* vm) {
printf("Agent_OnUnload called\n");
}
diff --git a/test/901-hello-ti-agent/basics.cc b/test/901-hello-ti-agent/basics.cc
index 43a1d83..59212de 100644
--- a/test/901-hello-ti-agent/basics.cc
+++ b/test/901-hello-ti-agent/basics.cc
@@ -44,14 +44,14 @@
return out;
}
-static void JNICALL VMStartCallback(jvmtiEnv *jenv, JNIEnv* jni_env ATTRIBUTE_UNUSED) {
+static void JNICALL VMStartCallback(jvmtiEnv *jenv, [[maybe_unused]] JNIEnv* jni_env) {
printf("VMStart (phase %d)\n", getPhase(jenv));
fsync(1);
}
static void JNICALL VMInitCallback(jvmtiEnv *jvmti_env,
- JNIEnv* jni_env ATTRIBUTE_UNUSED,
- jthread thread ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* jni_env,
+ [[maybe_unused]] jthread thread) {
printf("VMInit (phase %d)\n", getPhase(jvmti_env));
fsync(1);
}
@@ -83,8 +83,8 @@
}
jint OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
printf("Loaded Agent for test 901-hello-ti-agent\n");
fsync(1);
jvmtiEnv* env = nullptr;
@@ -157,14 +157,14 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test901_setVerboseFlag(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jint iflag, jboolean val) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jint iflag, jboolean val) {
jvmtiVerboseFlag flag = static_cast<jvmtiVerboseFlag>(iflag);
jvmtiError result = jvmti_env->SetVerboseFlag(flag, val);
JvmtiErrorToException(env, jvmti_env, result);
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test901_checkLivePhase(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jvmtiPhase current_phase;
jvmtiError phase_result = jvmti_env->GetPhase(&current_phase);
if (JvmtiErrorToException(env, jvmti_env, phase_result)) {
@@ -180,7 +180,7 @@
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test901_checkUnattached(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass Main_klass) {
+ [[maybe_unused]] JNIEnv* env, jclass Main_klass) {
jvmtiError res = JVMTI_ERROR_NONE;
std::thread t1(CallJvmtiFunction, jvmti_env, Main_klass, &res);
t1.join();
@@ -188,7 +188,7 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test901_getErrorName(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jint error) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jint error) {
char* name;
jvmtiError res = jvmti_env->GetErrorName(static_cast<jvmtiError>(error), &name);
if (JvmtiErrorToException(env, jvmti_env, res)) {
diff --git a/test/903-hello-tagging/tagging.cc b/test/903-hello-tagging/tagging.cc
index e0a0136..a5eb49c 100644
--- a/test/903-hello-tagging/tagging.cc
+++ b/test/903-hello-tagging/tagging.cc
@@ -134,7 +134,7 @@
}
extern "C" JNIEXPORT jlongArray JNICALL Java_art_Test903_testTagsInDifferentEnvs(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jlong base_tag, jint count) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jlong base_tag, jint count) {
std::unique_ptr<jvmtiEnv*[]> envs = std::unique_ptr<jvmtiEnv*[]>(new jvmtiEnv*[count]);
envs[0] = jvmti_env;
for (int32_t i = 1; i != count; ++i) {
diff --git a/test/904-object-allocation/tracking.cc b/test/904-object-allocation/tracking.cc
index abb6083..4b14932 100644
--- a/test/904-object-allocation/tracking.cc
+++ b/test/904-object-allocation/tracking.cc
@@ -84,7 +84,7 @@
static std::mutex gEventsMutex;
static std::vector<EventLog> gEvents;
-static void JNICALL ObjectAllocated(jvmtiEnv* ti_env ATTRIBUTE_UNUSED,
+static void JNICALL ObjectAllocated([[maybe_unused]] jvmtiEnv* ti_env,
JNIEnv* jni_env,
jthread thread,
jobject object,
@@ -99,7 +99,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test904_setupObjectAllocCallback(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jboolean enable) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jboolean enable) {
env->GetJavaVM(&vm);
jvmtiEventCallbacks callbacks;
memset(&callbacks, 0, sizeof(jvmtiEventCallbacks));
@@ -119,7 +119,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test904_getTrackingEventMessages(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jobjectArray threads) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jobjectArray threads) {
std::lock_guard<std::mutex> guard(gEventsMutex);
std::vector<std::string> real_events;
std::vector<jthread> thread_lst;
diff --git a/test/905-object-free/tracking_free.cc b/test/905-object-free/tracking_free.cc
index d85d9d3..ae93322 100644
--- a/test/905-object-free/tracking_free.cc
+++ b/test/905-object-free/tracking_free.cc
@@ -64,7 +64,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test905_setupObjectFreeCallback(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
setupObjectFreeCallback(env, jvmti_env, ObjectFree1);
JavaVM* jvm = nullptr;
env->GetJavaVM(&jvm);
@@ -74,7 +74,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test905_enableFreeTracking(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jboolean enable) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jboolean enable) {
jvmtiError ret = jvmti_env->SetEventNotificationMode(
enable ? JVMTI_ENABLE : JVMTI_DISABLE,
JVMTI_EVENT_OBJECT_FREE,
@@ -90,7 +90,7 @@
}
extern "C" JNIEXPORT jlongArray JNICALL Java_art_Test905_getCollectedTags(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint index) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jint index) {
std::lock_guard<std::mutex> mu((index == 0) ? ct1_mutex : ct2_mutex);
std::vector<jlong>& tags = (index == 0) ? collected_tags1 : collected_tags2;
jlongArray ret = env->NewLongArray(tags.size());
@@ -105,7 +105,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test905_getTag2(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj) {
jlong tag;
jvmtiError ret = jvmti_env2->GetTag(obj, &tag);
JvmtiErrorToException(env, jvmti_env, ret);
@@ -113,7 +113,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test905_setTag2(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jlong tag) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jlong tag) {
jvmtiError ret = jvmti_env2->SetTag(obj, tag);
JvmtiErrorToException(env, jvmti_env, ret);
}
diff --git a/test/906-iterate-heap/iterate_heap.cc b/test/906-iterate-heap/iterate_heap.cc
index f0a6624..b226eb0 100644
--- a/test/906-iterate-heap/iterate_heap.cc
+++ b/test/906-iterate-heap/iterate_heap.cc
@@ -73,7 +73,7 @@
extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapCount(
JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jint heap_filter,
jclass klass_filter,
jint stop_after) {
@@ -84,10 +84,10 @@
stop_after(_stop_after) {
}
- jint Handle(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED) override {
+ jint Handle([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jint length) override {
counter++;
if (counter == stop_after) {
return JVMTI_VISIT_ABORT;
@@ -111,7 +111,7 @@
extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapData(
JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jint heap_filter,
jclass klass_filter,
jlongArray class_tags,
@@ -156,15 +156,15 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test906_iterateThroughHeapAdd(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint heap_filter, jclass klass_filter) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jint heap_filter, jclass klass_filter) {
class AddIterationConfig : public IterationConfig {
public:
AddIterationConfig() {}
- jint Handle(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
+ jint Handle([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
jlong* tag_ptr,
- jint length ATTRIBUTE_UNUSED) override {
+ [[maybe_unused]] jint length) override {
jlong current_tag = *tag_ptr;
if (current_tag != 0) {
*tag_ptr = current_tag + 10;
@@ -178,15 +178,15 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapString(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) {
struct FindStringCallbacks {
explicit FindStringCallbacks(jlong t) : tag_to_find(t) {}
- static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED) {
+ static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data) {
return 0;
}
@@ -234,15 +234,15 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapPrimitiveArray(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) {
struct FindArrayCallbacks {
explicit FindArrayCallbacks(jlong t) : tag_to_find(t) {}
- static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED) {
+ static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data) {
return 0;
}
@@ -345,15 +345,15 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapPrimitiveFields(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) {
struct FindFieldCallbacks {
explicit FindFieldCallbacks(jlong t) : tag_to_find(t) {}
- static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED) {
+ static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data) {
return 0;
}
diff --git a/test/907-get-loaded-classes/get_loaded_classes.cc b/test/907-get-loaded-classes/get_loaded_classes.cc
index 87c98e1..58addd1 100644
--- a/test/907-get-loaded-classes/get_loaded_classes.cc
+++ b/test/907-get-loaded-classes/get_loaded_classes.cc
@@ -41,7 +41,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test907_getLoadedClasses(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
jint count = -1;
jclass* classes = nullptr;
jvmtiError result = jvmti_env->GetLoadedClasses(&count, &classes);
diff --git a/test/908-gc-start-finish/gc_callbacks.cc b/test/908-gc-start-finish/gc_callbacks.cc
index ddd2ba7..e839a22 100644
--- a/test/908-gc-start-finish/gc_callbacks.cc
+++ b/test/908-gc-start-finish/gc_callbacks.cc
@@ -32,16 +32,16 @@
static size_t starts = 0;
static size_t finishes = 0;
-static void JNICALL GarbageCollectionFinish(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) {
+static void JNICALL GarbageCollectionFinish([[maybe_unused]] jvmtiEnv* ti_env) {
finishes++;
}
-static void JNICALL GarbageCollectionStart(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) {
+static void JNICALL GarbageCollectionStart([[maybe_unused]] jvmtiEnv* ti_env) {
starts++;
}
extern "C" JNIEXPORT void JNICALL Java_art_Test908_setupGcCallback(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
jvmtiEventCallbacks callbacks;
memset(&callbacks, 0, sizeof(jvmtiEventCallbacks));
callbacks.GarbageCollectionFinish = GarbageCollectionFinish;
@@ -52,7 +52,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test908_enableGcTracking(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jboolean enable) {
jvmtiError ret = jvmti_env->SetEventNotificationMode(
enable ? JVMTI_ENABLE : JVMTI_DISABLE,
@@ -70,15 +70,15 @@
}
}
-extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcStarts(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcStarts([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
jint result = static_cast<jint>(starts);
starts = 0;
return result;
}
-extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcFinishes(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcFinishes([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
jint result = static_cast<jint>(finishes);
finishes = 0;
return result;
diff --git a/test/909-attach-agent/attach.cc b/test/909-attach-agent/attach.cc
index 50ab26a..56d6c0f 100644
--- a/test/909-attach-agent/attach.cc
+++ b/test/909-attach-agent/attach.cc
@@ -35,8 +35,8 @@
static constexpr jint kArtTiVersion = JVMTI_VERSION_1_2 | 0x40000000;
jint OnAttach(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
Println("Attached Agent for test 909-attach-agent");
jvmtiEnv* env = nullptr;
jvmtiEnv* env2 = nullptr;
diff --git a/test/910-methods/methods.cc b/test/910-methods/methods.cc
index 9c726e1..473e875 100644
--- a/test/910-methods/methods.cc
+++ b/test/910-methods/methods.cc
@@ -31,7 +31,7 @@
namespace Test910Methods {
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test910_getMethodName(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
char* name;
@@ -74,7 +74,7 @@
}
extern "C" JNIEXPORT jclass JNICALL Java_art_Test910_getMethodDeclaringClass(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jclass declaring_class;
@@ -87,7 +87,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getMethodModifiers(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jint modifiers;
@@ -100,7 +100,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getMaxLocals(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jint max_locals;
@@ -113,7 +113,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getArgumentsSize(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jint arguments;
@@ -126,7 +126,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test910_getMethodLocationStart(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jlong start;
@@ -140,7 +140,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test910_getMethodLocationEnd(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jlong start;
@@ -154,7 +154,7 @@
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodNative(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jboolean is_native;
@@ -167,7 +167,7 @@
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodObsolete(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jboolean is_obsolete;
@@ -180,7 +180,7 @@
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodSynthetic(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) {
jmethodID id = env->FromReflectedMethod(method);
jboolean is_synthetic;
diff --git a/test/911-get-stack-trace/stack_trace.cc b/test/911-get-stack-trace/stack_trace.cc
index 2b620b1..3b8210a 100644
--- a/test/911-get-stack-trace/stack_trace.cc
+++ b/test/911-get-stack-trace/stack_trace.cc
@@ -126,7 +126,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_PrintThread_getStackTrace(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thread, jint start, jint max) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thread, jint start, jint max) {
std::unique_ptr<jvmtiFrameInfo[]> frames(new jvmtiFrameInfo[max]);
jint count;
@@ -141,7 +141,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_AllTraces_getAllStackTraces(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint max) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jint max) {
jint thread_count;
jvmtiStackInfo* stack_infos;
{
@@ -169,7 +169,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_ThreadListTraces_getThreadListStackTraces(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobjectArray jthreads, jint max) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobjectArray jthreads, jint max) {
jint thread_count = env->GetArrayLength(jthreads);
std::unique_ptr<jthread[]> threads(new jthread[thread_count]);
for (jint i = 0; i != thread_count; ++i) {
@@ -205,7 +205,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Frames_getFrameCount(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thread) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thread) {
jint count;
jvmtiError result = jvmti_env->GetFrameCount(thread, &count);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -215,7 +215,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Frames_getFrameLocation(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thread, jint depth) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thread, jint depth) {
jmethodID method;
jlocation location;
diff --git a/test/912-classes/classes.cc b/test/912-classes/classes.cc
index ff50223..8fa41f9 100644
--- a/test/912-classes/classes.cc
+++ b/test/912-classes/classes.cc
@@ -37,7 +37,7 @@
namespace Test912Classes {
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isModifiableClass(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jboolean res = JNI_FALSE;
jvmtiError result = jvmti_env->IsModifiableClass(klass, &res);
JvmtiErrorToException(env, jvmti_env, result);
@@ -45,7 +45,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassSignature(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
char* sig;
char* gen;
jvmtiError result = jvmti_env->GetClassSignature(klass, &sig, &gen);
@@ -74,7 +74,7 @@
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isInterface(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jboolean is_interface = JNI_FALSE;
jvmtiError result = jvmti_env->IsInterface(klass, &is_interface);
JvmtiErrorToException(env, jvmti_env, result);
@@ -82,7 +82,7 @@
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isArrayClass(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jboolean is_array_class = JNI_FALSE;
jvmtiError result = jvmti_env->IsArrayClass(klass, &is_array_class);
JvmtiErrorToException(env, jvmti_env, result);
@@ -90,7 +90,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test912_getClassModifiers(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jint mod;
jvmtiError result = jvmti_env->GetClassModifiers(klass, &mod);
JvmtiErrorToException(env, jvmti_env, result);
@@ -98,7 +98,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassFields(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jint count = 0;
jfieldID* fields = nullptr;
jvmtiError result = jvmti_env->GetClassFields(klass, &count, &fields);
@@ -123,7 +123,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassMethods(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jint count = 0;
jmethodID* methods = nullptr;
jvmtiError result = jvmti_env->GetClassMethods(klass, &count, &methods);
@@ -148,7 +148,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getImplementedInterfaces(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jint count = 0;
jclass* classes = nullptr;
jvmtiError result = jvmti_env->GetImplementedInterfaces(klass, &count, &classes);
@@ -167,7 +167,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test912_getClassStatus(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jint status;
jvmtiError result = jvmti_env->GetClassStatus(klass, &status);
JvmtiErrorToException(env, jvmti_env, result);
@@ -175,7 +175,7 @@
}
extern "C" JNIEXPORT jobject JNICALL Java_art_Test912_getClassLoader(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jobject classloader;
jvmtiError result = jvmti_env->GetClassLoader(klass, &classloader);
JvmtiErrorToException(env, jvmti_env, result);
@@ -183,7 +183,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassLoaderClasses(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jobject jclassloader) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jobject jclassloader) {
jint count = 0;
jclass* classes = nullptr;
jvmtiError result = jvmti_env->GetClassLoaderClasses(jclassloader, &count, &classes);
@@ -202,7 +202,7 @@
}
extern "C" JNIEXPORT jintArray JNICALL Java_art_Test912_getClassVersion(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
jint major, minor;
jvmtiError result = jvmti_env->GetClassVersionNumbers(klass, &minor, &major);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -280,7 +280,7 @@
static std::vector<std::string> gEvents;
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassLoadMessages(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
std::lock_guard<std::mutex> guard(gEventsMutex);
jobjectArray ret = CreateObjectArray(env,
static_cast<jint>(gEvents.size()),
@@ -365,7 +365,7 @@
std::string ClassLoadPreparePrinter::thread_name_filter_; // NOLINT [runtime/string] [4]
extern "C" JNIEXPORT void JNICALL Java_art_Test912_enableClassLoadPreparePrintEvents(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean enable, jthread thread) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean enable, jthread thread) {
if (thread != nullptr) {
ClassLoadPreparePrinter::thread_name_filter_ =
ClassLoadPreparePrinter::GetThreadName(jvmti_env, env, thread);
@@ -432,7 +432,7 @@
static constexpr const char* kWeakInitSig = "(Ljava/lang/Object;)V";
static constexpr const char* kWeakGetSig = "()Ljava/lang/Object;";
- static void AgentThreadTest(jvmtiEnv* jvmti ATTRIBUTE_UNUSED,
+ static void AgentThreadTest([[maybe_unused]] jvmtiEnv* jvmti,
JNIEnv* env,
jobject* obj_global) {
jobject target = *obj_global;
@@ -449,7 +449,7 @@
static void JNICALL ClassLoadCallback(jvmtiEnv* jenv,
JNIEnv* jni_env,
- jthread thread ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jthread thread,
jclass klass) {
std::string name = GetClassName(jenv, jni_env, klass);
if (name == kClassName) {
@@ -470,7 +470,7 @@
static void JNICALL ClassPrepareCallback(jvmtiEnv* jenv,
JNIEnv* jni_env,
- jthread thread ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jthread thread,
jclass klass) {
std::string name = GetClassName(jenv, jni_env, klass);
if (name == kClassName) {
@@ -577,13 +577,13 @@
bool ClassLoadPrepareEquality::compared_ = false;
extern "C" JNIEXPORT void JNICALL Java_art_Test912_setEqualityEventStorageClass(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) {
ClassLoadPrepareEquality::storage_class_ =
reinterpret_cast<jclass>(env->NewGlobalRef(klass));
}
extern "C" JNIEXPORT void JNICALL Java_art_Test912_enableClassLoadPrepareEqualityEvents(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean b) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean b) {
EnableEvents(env,
b,
ClassLoadPrepareEquality::ClassLoadCallback,
@@ -599,17 +599,17 @@
// Global to pass information to the ClassPrepare event.
static jobject gRunnableGlobal = nullptr;
extern "C" JNIEXPORT void JNICALL Java_art_Test912_runRecursiveClassPrepareEvents(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject runnable) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject runnable) {
CHECK(gRunnableGlobal == nullptr);
gRunnableGlobal = env->NewGlobalRef(runnable);
EnableEvents(
env,
true,
nullptr,
- [](jvmtiEnv* jenv ATTRIBUTE_UNUSED,
+ []([[maybe_unused]] jvmtiEnv* jenv,
JNIEnv* jni_env,
- jthread thread ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) -> void {
+ [[maybe_unused]] jthread thread,
+ [[maybe_unused]] jclass klass) -> void {
jclass runnable_class = jni_env->FindClass("java/lang/Runnable");
jni_env->CallVoidMethod(
gRunnableGlobal, jni_env->GetMethodID(runnable_class, "run", "()V"));
diff --git a/test/912-classes/classes_art.cc b/test/912-classes/classes_art.cc
index de2e456..a3b4d94 100644
--- a/test/912-classes/classes_art.cc
+++ b/test/912-classes/classes_art.cc
@@ -75,10 +75,10 @@
}
struct ClassLoadSeen {
- static void JNICALL ClassLoadSeenCallback(jvmtiEnv* jenv ATTRIBUTE_UNUSED,
- JNIEnv* jni_env ATTRIBUTE_UNUSED,
- jthread thread ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+ static void JNICALL ClassLoadSeenCallback([[maybe_unused]] jvmtiEnv* jenv,
+ [[maybe_unused]] JNIEnv* jni_env,
+ [[maybe_unused]] jthread thread,
+ [[maybe_unused]] jclass klass) {
saw_event = true;
}
@@ -87,17 +87,17 @@
bool ClassLoadSeen::saw_event = false;
extern "C" JNIEXPORT void JNICALL Java_art_Test912Art_enableClassLoadSeenEvents(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean b) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean b) {
EnableEvents(env, b, ClassLoadSeen::ClassLoadSeenCallback, nullptr);
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912Art_hadLoadEvent(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass Main_klass ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
return ClassLoadSeen::saw_event ? JNI_TRUE : JNI_FALSE;
}
extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912Art_isLoadedClass(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring class_name) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring class_name) {
ScopedUtfChars name(env, class_name);
jint class_count;
diff --git a/test/913-heaps/heaps.cc b/test/913-heaps/heaps.cc
index 671cff8..e4c3223 100644
--- a/test/913-heaps/heaps.cc
+++ b/test/913-heaps/heaps.cc
@@ -48,7 +48,7 @@
static constexpr const char* kThreadReferree = "3000@0";
extern "C" JNIEXPORT void JNICALL Java_art_Test913_forceGarbageCollection(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
jvmtiError ret = jvmti_env->ForceGarbageCollection();
JvmtiErrorToException(env, jvmti_env, ret);
}
@@ -68,7 +68,7 @@
// Register a class (or general object) in the class-data map. The serial number is determined by
// the order of calls to this function (so stable Java code leads to stable numbering).
extern "C" JNIEXPORT void JNICALL Java_art_Test913_registerClass(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag, jobject obj) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag, jobject obj) {
ClassData data;
if (JvmtiErrorToException(env, jvmti_env, jvmti_env->GetObjectSize(obj, &data.size))) {
return;
@@ -139,7 +139,7 @@
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferences(
JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jint heap_filter,
jclass klass_filter,
jobject initial_object,
@@ -162,7 +162,7 @@
jlong* tag_ptr,
jlong* referrer_tag_ptr,
jint length,
- void* user_data ATTRIBUTE_UNUSED) override {
+ [[maybe_unused]] void* user_data) override {
jlong tag = *tag_ptr;
// Ignore any jni-global roots with untagged classes. These can be from the environment,
@@ -570,18 +570,18 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferencesString(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject initial_object) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject initial_object) {
struct FindStringCallbacks {
static jint JNICALL FollowReferencesCallback(
- jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED,
- const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED,
- jlong class_tag ATTRIBUTE_UNUSED,
- jlong referrer_class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jlong* referrer_tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiHeapReferenceKind reference_kind,
+ [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info,
+ [[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong referrer_class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jlong* referrer_tag_ptr,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data) {
return JVMTI_VISIT_OBJECTS; // Continue visiting.
}
@@ -633,18 +633,18 @@
extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_followReferencesPrimitiveArray(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject initial_object) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject initial_object) {
struct FindArrayCallbacks {
static jint JNICALL FollowReferencesCallback(
- jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED,
- const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED,
- jlong class_tag ATTRIBUTE_UNUSED,
- jlong referrer_class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jlong* referrer_tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiHeapReferenceKind reference_kind,
+ [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info,
+ [[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong referrer_class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jlong* referrer_tag_ptr,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data) {
return JVMTI_VISIT_OBJECTS; // Continue visiting.
}
@@ -749,18 +749,18 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_followReferencesPrimitiveFields(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject initial_object) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject initial_object) {
struct FindFieldCallbacks {
static jint JNICALL FollowReferencesCallback(
- jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED,
- const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED,
- jlong class_tag ATTRIBUTE_UNUSED,
- jlong referrer_class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
- jlong* tag_ptr ATTRIBUTE_UNUSED,
- jlong* referrer_tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jvmtiHeapReferenceKind reference_kind,
+ [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info,
+ [[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong referrer_class_tag,
+ [[maybe_unused]] jlong size,
+ [[maybe_unused]] jlong* tag_ptr,
+ [[maybe_unused]] jlong* referrer_tag_ptr,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data) {
return JVMTI_VISIT_OBJECTS; // Continue visiting.
}
@@ -823,16 +823,16 @@
static size_t starts = 0;
static size_t finishes = 0;
-static void JNICALL GarbageCollectionFinish(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) {
+static void JNICALL GarbageCollectionFinish([[maybe_unused]] jvmtiEnv* ti_env) {
finishes++;
}
-static void JNICALL GarbageCollectionStart(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) {
+static void JNICALL GarbageCollectionStart([[maybe_unused]] jvmtiEnv* ti_env) {
starts++;
}
extern "C" JNIEXPORT void JNICALL Java_art_Test913_setupGcCallback(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
jvmtiEventCallbacks callbacks;
memset(&callbacks, 0, sizeof(jvmtiEventCallbacks));
callbacks.GarbageCollectionFinish = GarbageCollectionFinish;
@@ -843,7 +843,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test913_enableGcTracking(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jboolean enable) {
jvmtiError ret = jvmti_env->SetEventNotificationMode(
enable ? JVMTI_ENABLE : JVMTI_DISABLE,
@@ -861,15 +861,15 @@
}
}
-extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcStarts(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcStarts([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
jint result = static_cast<jint>(starts);
starts = 0;
return result;
}
-extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcFinishes(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcFinishes([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
jint result = static_cast<jint>(finishes);
finishes = 0;
return result;
@@ -902,7 +902,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkForExtensionApis(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
jint extension_count;
jvmtiExtensionFunctionInfo* extensions;
jvmtiError result = jvmti_env->GetExtensionFunctions(&extension_count, &extensions);
@@ -993,7 +993,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getObjectHeapId(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) {
CHECK(gGetObjectHeapIdFn != nullptr);
jint heap_id;
jvmtiError result = gGetObjectHeapIdFn(jvmti_env, tag, &heap_id);
@@ -1002,7 +1002,7 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_getHeapName(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint heap_id) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jint heap_id) {
CHECK(gGetHeapNameFn != nullptr);
char* heap_name;
jvmtiError result = gGetHeapNameFn(jvmti_env, heap_id, &heap_name);
@@ -1015,20 +1015,20 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkGetObjectHeapIdInCallback(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag, jint heap_id) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag, jint heap_id) {
CHECK(gGetObjectHeapIdFn != nullptr);
{
struct GetObjectHeapIdCallbacks {
static jint JNICALL FollowReferencesCallback(
- jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED,
- const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED,
- jlong class_tag ATTRIBUTE_UNUSED,
- jlong referrer_class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jvmtiHeapReferenceKind reference_kind,
+ [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info,
+ [[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong referrer_class_tag,
+ [[maybe_unused]] jlong size,
jlong* tag_ptr,
- jlong* referrer_tag_ptr ATTRIBUTE_UNUSED,
- jint length ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jlong* referrer_tag_ptr,
+ [[maybe_unused]] jint length,
void* user_data) {
if (*tag_ptr != 0) {
GetObjectHeapIdCallbacks* p = reinterpret_cast<GetObjectHeapIdCallbacks*>(user_data);
@@ -1064,10 +1064,10 @@
{
struct GetObjectHeapIdCallbacks {
- static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
+ static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
jlong* tag_ptr,
- jint length ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jint length,
void* user_data) {
if (*tag_ptr != 0) {
GetObjectHeapIdCallbacks* p = reinterpret_cast<GetObjectHeapIdCallbacks*>(user_data);
@@ -1104,11 +1104,11 @@
static bool gFoundExt = false;
-static jint JNICALL HeapIterationExtCallback(jlong class_tag ATTRIBUTE_UNUSED,
- jlong size ATTRIBUTE_UNUSED,
+static jint JNICALL HeapIterationExtCallback([[maybe_unused]] jlong class_tag,
+ [[maybe_unused]] jlong size,
jlong* tag_ptr,
- jint length ATTRIBUTE_UNUSED,
- void* user_data ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jint length,
+ [[maybe_unused]] void* user_data,
jint heap_id) {
// We expect some tagged objects at or above the threshold, where the expected heap id is
// encoded into lowest byte.
@@ -1123,7 +1123,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test913_iterateThroughHeapExt(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
CHECK(gIterateThroughHeapExt != nullptr);
jvmtiHeapCallbacks callbacks;
diff --git a/test/920-objects/objects.cc b/test/920-objects/objects.cc
index 101ebb9..8fddc4a 100644
--- a/test/920-objects/objects.cc
+++ b/test/920-objects/objects.cc
@@ -28,7 +28,7 @@
namespace Test920Objects {
extern "C" JNIEXPORT jlong JNICALL Java_art_Test920_getObjectSize(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED, jobject object) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass, jobject object) {
jlong size;
jvmtiError result = jvmti_env->GetObjectSize(object, &size);
@@ -44,7 +44,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test920_getObjectHashCode(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED, jobject object) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass, jobject object) {
jint hash;
jvmtiError result = jvmti_env->GetObjectHashCode(object, &hash);
diff --git a/test/922-properties/properties.cc b/test/922-properties/properties.cc
index 6af45f5..eed0a00 100644
--- a/test/922-properties/properties.cc
+++ b/test/922-properties/properties.cc
@@ -30,7 +30,7 @@
namespace Test922Properties {
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test922_getSystemProperties(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jint count;
char** properties;
jvmtiError result = jvmti_env->GetSystemProperties(&count, &properties);
@@ -55,7 +55,7 @@
}
extern "C" JNIEXPORT jstring JNICALL Java_art_Test922_getSystemProperty(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring key) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring key) {
ScopedUtfChars string(env, key);
if (string.c_str() == nullptr) {
return nullptr;
@@ -75,7 +75,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test922_setSystemProperty(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring key, jstring value) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring key, jstring value) {
ScopedUtfChars key_string(env, key);
if (key_string.c_str() == nullptr) {
return;
diff --git a/test/923-monitors/monitors.cc b/test/923-monitors/monitors.cc
index e4f3860..bfd00f8 100644
--- a/test/923-monitors/monitors.cc
+++ b/test/923-monitors/monitors.cc
@@ -38,7 +38,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test923_createRawMonitor(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jrawMonitorID id;
jvmtiError result = jvmti_env->CreateRawMonitor("placeholder", &id);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -48,37 +48,37 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test923_destroyRawMonitor(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) {
jvmtiError result = jvmti_env->DestroyRawMonitor(LongToMonitor(l));
JvmtiErrorToException(env, jvmti_env, result);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorEnter(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) {
jvmtiError result = jvmti_env->RawMonitorEnter(LongToMonitor(l));
JvmtiErrorToException(env, jvmti_env, result);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorExit(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) {
jvmtiError result = jvmti_env->RawMonitorExit(LongToMonitor(l));
JvmtiErrorToException(env, jvmti_env, result);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorWait(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l, jlong millis) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l, jlong millis) {
jvmtiError result = jvmti_env->RawMonitorWait(LongToMonitor(l), millis);
JvmtiErrorToException(env, jvmti_env, result);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorNotify(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) {
jvmtiError result = jvmti_env->RawMonitorNotify(LongToMonitor(l));
JvmtiErrorToException(env, jvmti_env, result);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorNotifyAll(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) {
jvmtiError result = jvmti_env->RawMonitorNotifyAll(LongToMonitor(l));
JvmtiErrorToException(env, jvmti_env, result);
}
diff --git a/test/924-threads/threads.cc b/test/924-threads/threads.cc
index 8caff76..49f805c 100644
--- a/test/924-threads/threads.cc
+++ b/test/924-threads/threads.cc
@@ -41,7 +41,7 @@
};
extern "C" JNIEXPORT jlong JNICALL Java_art_Test924_nativeWaiterStructAlloc(
- JNIEnv* env, jclass TestClass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass TestClass) {
WaiterStruct* s = nullptr;
if (JvmtiErrorToException(env,
jvmti_env,
@@ -55,19 +55,19 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeWaiterStructWaitForNative(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass TestClass ATTRIBUTE_UNUSED, jlong waiter_struct) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass TestClass, jlong waiter_struct) {
WaiterStruct* s = reinterpret_cast<WaiterStruct*>(static_cast<intptr_t>(waiter_struct));
while (!s->started) { }
}
extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeWaiterStructFinish(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass TestClass ATTRIBUTE_UNUSED, jlong waiter_struct) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass TestClass, jlong waiter_struct) {
WaiterStruct* s = reinterpret_cast<WaiterStruct*>(static_cast<intptr_t>(waiter_struct));
s->finish = true;
}
extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeLoop(JNIEnv* env,
- jclass TestClass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass TestClass,
jlong waiter_struct) {
WaiterStruct* s = reinterpret_cast<WaiterStruct*>(static_cast<intptr_t>(waiter_struct));
s->started = true;
@@ -79,7 +79,7 @@
// private static native Object[] getThreadInfo(Thread t);
extern "C" JNIEXPORT jthread JNICALL Java_art_Test924_getCurrentThread(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jthread thread = nullptr;
jvmtiError result = jvmti_env->GetCurrentThread(&thread);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -89,7 +89,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getThreadInfo(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread) {
jvmtiThreadInfo info;
memset(&info, 0, sizeof(jvmtiThreadInfo));
@@ -137,7 +137,7 @@
}
extern "C" JNIEXPORT jint JNICALL Java_art_Test924_getThreadState(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread) {
jint state;
jvmtiError result = jvmti_env->GetThreadState(thread, &state);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -147,7 +147,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getAllThreads(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jint thread_count;
jthread* threads;
@@ -167,7 +167,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test924_getTLS(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread) {
void* tls;
jvmtiError result = jvmti_env->GetThreadLocalStorage(thread, &tls);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -177,7 +177,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test924_setTLS(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread, jlong val) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread, jlong val) {
const void* tls = reinterpret_cast<void*>(static_cast<uintptr_t>(val));
jvmtiError result = jvmti_env->SetThreadLocalStorage(thread, tls);
JvmtiErrorToException(env, jvmti_env, result);
@@ -223,7 +223,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test924_enableThreadEvents(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean b) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean b) {
if (b == JNI_FALSE) {
jvmtiError ret = jvmti_env->SetEventNotificationMode(JVMTI_DISABLE,
JVMTI_EVENT_THREAD_START,
@@ -260,7 +260,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getThreadEventMessages(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
std::lock_guard<std::mutex> guard(gEventsMutex);
jobjectArray ret = CreateObjectArray(env,
static_cast<jint>(gEvents.size()),
diff --git a/test/925-threadgroups/threadgroups.cc b/test/925-threadgroups/threadgroups.cc
index cc053bc..9154756 100644
--- a/test/925-threadgroups/threadgroups.cc
+++ b/test/925-threadgroups/threadgroups.cc
@@ -36,7 +36,7 @@
// private static native Object[] getThreadGroupChildren();
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getTopThreadGroups(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jthreadGroup* groups;
jint group_count;
jvmtiError result = jvmti_env->GetTopThreadGroups(&group_count, &groups);
@@ -55,7 +55,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getThreadGroupInfo(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthreadGroup group) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthreadGroup group) {
jvmtiThreadGroupInfo info;
jvmtiError result = jvmti_env->GetThreadGroupInfo(group, &info);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -87,7 +87,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getThreadGroupChildren(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthreadGroup group) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthreadGroup group) {
jint thread_count;
jthread* threads;
jint threadgroup_count;
diff --git a/test/927-timers/timers.cc b/test/927-timers/timers.cc
index 9eaac71..bf20130 100644
--- a/test/927-timers/timers.cc
+++ b/test/927-timers/timers.cc
@@ -32,7 +32,7 @@
namespace Test926Timers {
extern "C" JNIEXPORT jint JNICALL Java_art_Test927_getAvailableProcessors(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jint count;
jvmtiError result = jvmti_env->GetAvailableProcessors(&count);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -42,7 +42,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Test927_getTime(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jlong time;
jvmtiError result = jvmti_env->GetTime(&time);
if (JvmtiErrorToException(env, jvmti_env, result)) {
@@ -52,7 +52,7 @@
}
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test927_getTimerInfo(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jvmtiTimerInfo info;
jvmtiError result = jvmti_env->GetTimerInfo(&info);
if (JvmtiErrorToException(env, jvmti_env, result)) {
diff --git a/test/929-search/search.cc b/test/929-search/search.cc
index 5516105..fb79c6f 100644
--- a/test/929-search/search.cc
+++ b/test/929-search/search.cc
@@ -31,7 +31,7 @@
namespace Test929Search {
extern "C" JNIEXPORT void JNICALL Java_Main_addToBootClassLoader(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring segment) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring segment) {
ScopedUtfChars utf(env, segment);
if (utf.c_str() == nullptr) {
return;
@@ -41,7 +41,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_Main_addToSystemClassLoader(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring segment) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring segment) {
ScopedUtfChars utf(env, segment);
if (utf.c_str() == nullptr) {
return;
diff --git a/test/931-agent-thread/agent_thread.cc b/test/931-agent-thread/agent_thread.cc
index 391df4e..16f6b27 100644
--- a/test/931-agent-thread/agent_thread.cc
+++ b/test/931-agent-thread/agent_thread.cc
@@ -90,7 +90,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test931_testAgentThread(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
// Create a Thread object.
ScopedLocalRef<jobject> thread_name(env, env->NewStringUTF("Agent Thread"));
if (thread_name.get() == nullptr) {
diff --git a/test/933-misc-events/misc_events.cc b/test/933-misc-events/misc_events.cc
index d2ae0f4..2182bc0 100644
--- a/test/933-misc-events/misc_events.cc
+++ b/test/933-misc-events/misc_events.cc
@@ -33,13 +33,13 @@
static std::atomic<bool> saw_dump_request(false);
-static void DumpRequestCallback(jvmtiEnv* jenv ATTRIBUTE_UNUSED) {
+static void DumpRequestCallback([[maybe_unused]] jvmtiEnv* jenv) {
printf("Received dump request.\n");
saw_dump_request.store(true, std::memory_order::memory_order_relaxed);
}
extern "C" JNIEXPORT void JNICALL Java_art_Test933_testSigQuit(
- JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass Main_klass) {
jvmtiEventCallbacks callbacks;
memset(&callbacks, 0, sizeof(jvmtiEventCallbacks));
callbacks.DataDumpRequest = DumpRequestCallback;
diff --git a/test/936-search-onload/search_onload.cc b/test/936-search-onload/search_onload.cc
index 23cea83..d693413 100644
--- a/test/936-search-onload/search_onload.cc
+++ b/test/936-search-onload/search_onload.cc
@@ -34,8 +34,8 @@
namespace Test936SearchOnload {
jint OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) {
printf("Unable to get jvmti env!\n");
return 1;
diff --git a/test/945-obsolete-native/obsolete_native.cc b/test/945-obsolete-native/obsolete_native.cc
index 418ce90..29c7a80 100644
--- a/test/945-obsolete-native/obsolete_native.cc
+++ b/test/945-obsolete-native/obsolete_native.cc
@@ -32,7 +32,7 @@
namespace Test945ObsoleteNative {
extern "C" JNIEXPORT void JNICALL Java_art_Test945_00024Transform_doExecute(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject runnable) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject runnable) {
jclass runnable_klass = env->FindClass("java/lang/Runnable");
jmethodID run_method = env->GetMethodID(runnable_klass, "run", "()V");
env->CallVoidMethod(runnable, run_method);
diff --git a/test/980-redefine-object/redef_object.cc b/test/980-redefine-object/redef_object.cc
index a8393dc..7607718 100644
--- a/test/980-redefine-object/redef_object.cc
+++ b/test/980-redefine-object/redef_object.cc
@@ -38,10 +38,10 @@
static void JNICALL RedefineObjectHook(jvmtiEnv *jvmti_env,
JNIEnv* env,
- jclass class_being_redefined ATTRIBUTE_UNUSED,
- jobject loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass class_being_redefined,
+ [[maybe_unused]] jobject loader,
const char* name,
- jobject protection_domain ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject protection_domain,
jint class_data_len,
const unsigned char* class_data,
jint* new_class_data_len,
@@ -106,7 +106,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_Main_addMemoryTrackingCall(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jclass obj_class,
jthread thr) {
jvmtiCapabilities caps {.can_retransform_classes = 1};
diff --git a/test/983-source-transform-verify/source_transform.cc b/test/983-source-transform-verify/source_transform.cc
index 9e65a99..e778dbb 100644
--- a/test/983-source-transform-verify/source_transform.cc
+++ b/test/983-source-transform-verify/source_transform.cc
@@ -41,16 +41,16 @@
}
// The hook we are using.
-void JNICALL CheckDexFileHook(jvmtiEnv* jvmti_env ATTRIBUTE_UNUSED,
+void JNICALL CheckDexFileHook([[maybe_unused]] jvmtiEnv* jvmti_env,
JNIEnv* env,
jclass class_being_redefined,
- jobject loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject loader,
const char* name,
- jobject protection_domain ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject protection_domain,
jint class_data_len,
const unsigned char* class_data,
- jint* new_class_data_len ATTRIBUTE_UNUSED,
- unsigned char** new_class_data ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jint* new_class_data_len,
+ [[maybe_unused]] unsigned char** new_class_data) {
if (kSkipInitialLoad && class_being_redefined == nullptr) {
// Something got loaded concurrently. Just ignore it for now. To make sure the test is
// repeatable we only care about things that come from RetransformClasses.
diff --git a/test/986-native-method-bind/native_bind.cc b/test/986-native-method-bind/native_bind.cc
index 34e1f35..abb767c 100644
--- a/test/986-native-method-bind/native_bind.cc
+++ b/test/986-native-method-bind/native_bind.cc
@@ -40,22 +40,22 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test986_00024Transform_sayHi__(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
doUpPrintCall(env, "doSayHi");
}
extern "C" JNIEXPORT void JNICALL Java_art_Test986_00024Transform_sayHi2(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
doUpPrintCall(env, "doSayHi2");
}
-extern "C" JNIEXPORT void JNICALL NoReallySayGoodbye(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL NoReallySayGoodbye(JNIEnv* env, [[maybe_unused]] jclass klass) {
doUpPrintCall(env, "doSayBye");
}
-static void doJvmtiMethodBind(jvmtiEnv* jvmtienv ATTRIBUTE_UNUSED,
+static void doJvmtiMethodBind([[maybe_unused]] jvmtiEnv* jvmtienv,
JNIEnv* env,
- jthread thread ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jthread thread,
jmethodID m,
void* address,
/*out*/void** out_address) {
@@ -95,7 +95,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test986_setupNativeBindNotify(
- JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) {
jvmtiEventCallbacks cb;
memset(&cb, 0, sizeof(cb));
cb.NativeMethodBind = doJvmtiMethodBind;
@@ -103,7 +103,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test986_setNativeBindNotify(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jboolean enable) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jboolean enable) {
jvmtiError res = jvmti_env->SetEventNotificationMode(enable ? JVMTI_ENABLE : JVMTI_DISABLE,
JVMTI_EVENT_NATIVE_METHOD_BIND,
nullptr);
@@ -113,7 +113,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test986_rebindTransformClass(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass k) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jclass k) {
JNINativeMethod m[2];
m[0].name = "sayHi";
m[0].signature = "()V";
diff --git a/test/987-agent-bind/agent_bind.cc b/test/987-agent-bind/agent_bind.cc
index 7dbdd8e..51fd74e 100644
--- a/test/987-agent-bind/agent_bind.cc
+++ b/test/987-agent-bind/agent_bind.cc
@@ -40,12 +40,12 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test987_00024Transform_sayHi__(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
doUpPrintCall(env, "doSayHi");
}
extern "C" JNIEXPORT void JNICALL Java_art_Test987_00024Transform_sayHi2(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass klass) {
doUpPrintCall(env, "doSayHi2");
}
diff --git a/test/989-method-trace-throw/method_trace.cc b/test/989-method-trace-throw/method_trace.cc
index 019b6a9..edfff90 100644
--- a/test/989-method-trace-throw/method_trace.cc
+++ b/test/989-method-trace-throw/method_trace.cc
@@ -52,8 +52,8 @@
return env->CallStaticObjectMethod(klass, targetMethod);
}
-extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass klass ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass klass) {
return;
}
diff --git a/test/992-source-data/source_file.cc b/test/992-source-data/source_file.cc
index 78687ff..9d98c64 100644
--- a/test/992-source-data/source_file.cc
+++ b/test/992-source-data/source_file.cc
@@ -38,7 +38,7 @@
extern "C" JNIEXPORT
jstring JNICALL Java_art_Test992_getSourceFileName(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jclass target) {
char* file = nullptr;
if (JvmtiErrorToException(env, jvmti_env, jvmti_env->GetSourceFileName(target, &file))) {
@@ -51,7 +51,7 @@
extern "C" JNIEXPORT
jstring JNICALL Java_art_Test992_getSourceDebugExtension(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jclass target) {
char* ext = nullptr;
if (JvmtiErrorToException(env, jvmti_env, jvmti_env->GetSourceDebugExtension(target, &ext))) {
diff --git a/test/993-breakpoints-non-debuggable/onload.cc b/test/993-breakpoints-non-debuggable/onload.cc
index dbbcadc..f7e7f9d 100644
--- a/test/993-breakpoints-non-debuggable/onload.cc
+++ b/test/993-breakpoints-non-debuggable/onload.cc
@@ -67,7 +67,7 @@
.can_generate_resource_exhaustion_threads_events = 0,
};
-jint OnLoad(JavaVM* vm, char* options ATTRIBUTE_UNUSED, void* reserved ATTRIBUTE_UNUSED) {
+jint OnLoad(JavaVM* vm, [[maybe_unused]] char* options, [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), kArtTiVersion) != 0) {
printf("Unable to get jvmti env!\n");
return 1;
diff --git a/test/993-breakpoints/breakpoints.cc b/test/993-breakpoints/breakpoints.cc
index e9cf3b3..c0ee392 100644
--- a/test/993-breakpoints/breakpoints.cc
+++ b/test/993-breakpoints/breakpoints.cc
@@ -38,7 +38,7 @@
extern "C" JNIEXPORT
jobject JNICALL Java_art_Test993_constructNative(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject target,
jclass clazz) {
jmethodID method = env->FromReflectedMethod(target);
@@ -50,7 +50,7 @@
extern "C" JNIEXPORT
void JNICALL Java_art_Test993_invokeNativeObject(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject target,
jclass clazz,
jobject thizz) {
@@ -67,7 +67,7 @@
extern "C" JNIEXPORT
void JNICALL Java_art_Test993_invokeNativeBool(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject target,
jclass clazz,
jobject thizz) {
@@ -84,7 +84,7 @@
extern "C" JNIEXPORT
void JNICALL Java_art_Test993_invokeNativeLong(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject target,
jclass clazz,
jobject thizz) {
@@ -101,7 +101,7 @@
extern "C" JNIEXPORT
void JNICALL Java_art_Test993_invokeNative(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jobject target,
jclass clazz,
jobject thizz) {
diff --git a/test/996-breakpoint-obsolete/obsolete_breakpoints.cc b/test/996-breakpoint-obsolete/obsolete_breakpoints.cc
index 820af47..f8f9173 100644
--- a/test/996-breakpoint-obsolete/obsolete_breakpoints.cc
+++ b/test/996-breakpoint-obsolete/obsolete_breakpoints.cc
@@ -65,7 +65,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Test996_setBreakpointOnObsoleteMethod(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jlong loc) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jlong loc) {
jmethodID method = GetFirstObsoleteMethod(env, jvmti_env);
if (method == nullptr) {
return;
diff --git a/test/common/runtime_state.cc b/test/common/runtime_state.cc
index 46f3828..517eeae 100644
--- a/test/common/runtime_state.cc
+++ b/test/common/runtime_state.cc
@@ -72,7 +72,7 @@
}
extern "C" JNIEXPORT jobject JNICALL Java_Main_getCompilerFilter(JNIEnv* env,
- jclass caller ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass caller,
jclass cls) {
ScopedObjectAccess soa(env);
@@ -91,22 +91,22 @@
// public static native boolean runtimeIsSoftFail();
-extern "C" JNIEXPORT jboolean JNICALL Java_Main_runtimeIsSoftFail(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass cls ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jboolean JNICALL Java_Main_runtimeIsSoftFail([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass cls) {
return Runtime::Current()->IsVerificationSoftFail() ? JNI_TRUE : JNI_FALSE;
}
// public static native boolean hasImage();
-extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasImage(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass cls ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasImage([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass cls) {
return Runtime::Current()->GetHeap()->HasBootImageSpace();
}
// public static native boolean isImageDex2OatEnabled();
-extern "C" JNIEXPORT jboolean JNICALL Java_Main_isImageDex2OatEnabled(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass cls ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT jboolean JNICALL Java_Main_isImageDex2OatEnabled([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass cls) {
return Runtime::Current()->IsImageDex2OatEnabled();
}
@@ -453,14 +453,14 @@
}
extern "C" JNIEXPORT void JNICALL Java_Main_forceInterpreterOnThread(JNIEnv* env,
- jclass cls ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass cls) {
ScopedObjectAccess soa(env);
MutexLock thread_list_mu(soa.Self(), *Locks::thread_list_lock_);
soa.Self()->IncrementForceInterpreterCount();
}
-extern "C" JNIEXPORT void JNICALL Java_Main_setAsyncExceptionsThrown(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass cls ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Java_Main_setAsyncExceptionsThrown([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass cls) {
Runtime::Current()->SetAsyncExceptionsThrown();
}
diff --git a/test/common/stack_inspect.cc b/test/common/stack_inspect.cc
index 33f31e2..3e737fd 100644
--- a/test/common/stack_inspect.cc
+++ b/test/common/stack_inspect.cc
@@ -38,8 +38,8 @@
// public static native void disableStackFrameAsserts();
// Note: to globally disable asserts in unsupported configurations.
-extern "C" JNIEXPORT void JNICALL Java_Main_disableStackFrameAsserts(JNIEnv* env ATTRIBUTE_UNUSED,
- jclass cls ATTRIBUTE_UNUSED) {
+extern "C" JNIEXPORT void JNICALL Java_Main_disableStackFrameAsserts([[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass cls) {
asserts_enabled = false;
}
@@ -98,7 +98,7 @@
// TODO Remove 'require_deoptimizable' option once we have deoptimization through runtime frames.
extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInterpretedFunction(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method, jboolean require_deoptimizable) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method, jboolean require_deoptimizable) {
// Return false if this seems to not be an ART runtime.
if (Runtime::Current() == nullptr) {
return JNI_FALSE;
@@ -185,7 +185,7 @@
}
extern "C" JNIEXPORT jobject JNICALL Java_Main_getThisOfCaller(
- JNIEnv* env, jclass cls ATTRIBUTE_UNUSED) {
+ JNIEnv* env, [[maybe_unused]] jclass cls) {
ScopedObjectAccess soa(env);
std::unique_ptr<art::Context> context(art::Context::Create());
jobject result = nullptr;
diff --git a/test/ti-agent/agent_startup.cc b/test/ti-agent/agent_startup.cc
index d6fd266..5ebc78a 100644
--- a/test/ti-agent/agent_startup.cc
+++ b/test/ti-agent/agent_startup.cc
@@ -26,13 +26,13 @@
// Utility functions for binding jni methods.
extern "C" JNIEXPORT void JNICALL Java_art_Main_bindAgentJNI(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jstring className, jobject classLoader) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jstring className, jobject classLoader) {
ScopedUtfChars name(env, className);
BindFunctions(jvmti_env, env, name.c_str(), classLoader);
}
extern "C" JNIEXPORT void JNICALL Java_art_Main_bindAgentJNIForClass(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass bindClass) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jclass bindClass) {
BindFunctionsOnClass(jvmti_env, env, bindClass);
}
diff --git a/test/ti-agent/breakpoint_helper.cc b/test/ti-agent/breakpoint_helper.cc
index 83ba0a6..19134ce 100644
--- a/test/ti-agent/breakpoint_helper.cc
+++ b/test/ti-agent/breakpoint_helper.cc
@@ -60,7 +60,7 @@
extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Breakpoint_getLineNumberTableNative(
JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
jobject target) {
jmethodID method = env->FromReflectedMethod(target);
if (env->ExceptionCheck()) {
@@ -107,7 +107,7 @@
}
extern "C" JNIEXPORT jlong JNICALL Java_art_Breakpoint_getStartLocation(JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
jobject target) {
jmethodID method = env->FromReflectedMethod(target);
if (env->ExceptionCheck()) {
@@ -120,7 +120,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_clearBreakpoint(JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
jobject target,
jlocation location) {
jmethodID method = env->FromReflectedMethod(target);
@@ -131,7 +131,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_setBreakpoint(JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
jobject target,
jlocation location) {
jmethodID method = env->FromReflectedMethod(target);
@@ -143,7 +143,7 @@
extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_startBreakpointWatch(
JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
jclass method_klass,
jobject method,
jboolean allow_recursive,
@@ -190,7 +190,7 @@
extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_stopBreakpointWatch(
JNIEnv* env,
- jclass k ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass k,
jthread thr) {
if (JvmtiErrorToException(env, jvmti_env,
jvmti_env->SetEventNotificationMode(JVMTI_DISABLE,
diff --git a/test/ti-agent/common_load.cc b/test/ti-agent/common_load.cc
index ff8b3a8..1f35aa0 100644
--- a/test/ti-agent/common_load.cc
+++ b/test/ti-agent/common_load.cc
@@ -58,8 +58,8 @@
// A trivial OnLoad implementation that only initializes the global jvmti_env.
static jint MinimalOnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0) != 0) {
printf("Unable to get jvmti env!\n");
return 1;
diff --git a/test/ti-agent/early_return_helper.cc b/test/ti-agent/early_return_helper.cc
index e4aa5d0..df2703e 100644
--- a/test/ti-agent/early_return_helper.cc
+++ b/test/ti-agent/early_return_helper.cc
@@ -27,37 +27,37 @@
namespace common_early_return {
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_popFrame(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->PopFrame(thr));
}
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnFloat(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jfloat val) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jfloat val) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnFloat(thr, val));
}
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnDouble(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jdouble val) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jdouble val) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnDouble(thr, val));
}
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnLong(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jlong val) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jlong val) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnLong(thr, val));
}
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnInt(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jint val) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jint val) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnInt(thr, val));
}
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnVoid(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnVoid(thr));
}
extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnObject(
- JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jobject val) {
+ JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jobject val) {
JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnObject(thr, val));
}
diff --git a/test/ti-agent/exceptions_helper.cc b/test/ti-agent/exceptions_helper.cc
index e56c39b..6095c2e 100644
--- a/test/ti-agent/exceptions_helper.cc
+++ b/test/ti-agent/exceptions_helper.cc
@@ -107,7 +107,7 @@
extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_setupExceptionTracing(
JNIEnv* env,
- jclass exception ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass exception,
jclass klass,
jclass except,
jobject exception_event,
@@ -158,7 +158,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_enableExceptionCatchEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
JvmtiErrorToException(env,
jvmti_env,
jvmti_env->SetEventNotificationMode(JVMTI_ENABLE,
@@ -167,7 +167,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_enableExceptionEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
JvmtiErrorToException(env,
jvmti_env,
jvmti_env->SetEventNotificationMode(JVMTI_ENABLE,
@@ -176,7 +176,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_disableExceptionCatchEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
JvmtiErrorToException(env,
jvmti_env,
jvmti_env->SetEventNotificationMode(JVMTI_DISABLE,
@@ -185,7 +185,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_disableExceptionEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
JvmtiErrorToException(env,
jvmti_env,
jvmti_env->SetEventNotificationMode(JVMTI_DISABLE,
diff --git a/test/ti-agent/frame_pop_helper.cc b/test/ti-agent/frame_pop_helper.cc
index f39e185..45f3a06 100644
--- a/test/ti-agent/frame_pop_helper.cc
+++ b/test/ti-agent/frame_pop_helper.cc
@@ -34,7 +34,7 @@
static void framePopCB(jvmtiEnv* jvmti,
JNIEnv* jnienv,
jthread thr,
- jmethodID method ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jmethodID method,
jboolean was_popped_by_exception) {
FramePopData* data = nullptr;
if (JvmtiErrorToException(jnienv, jvmti,
diff --git a/test/ti-agent/redefinition_helper.cc b/test/ti-agent/redefinition_helper.cc
index 706531e..c702863 100644
--- a/test/ti-agent/redefinition_helper.cc
+++ b/test/ti-agent/redefinition_helper.cc
@@ -259,8 +259,8 @@
// Get all capabilities except those related to retransformation.
jint OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) {
printf("Unable to get jvmti env!\n");
return 1;
@@ -322,13 +322,13 @@
// The hook we are using.
void JNICALL CommonClassFileLoadHookRetransformable(jvmtiEnv* jvmti_env,
- JNIEnv* jni_env ATTRIBUTE_UNUSED,
- jclass class_being_redefined ATTRIBUTE_UNUSED,
- jobject loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jni_env,
+ [[maybe_unused]] jclass class_being_redefined,
+ [[maybe_unused]] jobject loader,
const char* name,
- jobject protection_domain ATTRIBUTE_UNUSED,
- jint class_data_len ATTRIBUTE_UNUSED,
- const unsigned char* class_dat ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject protection_domain,
+ [[maybe_unused]] jint class_data_len,
+ [[maybe_unused]] const unsigned char* class_data,
jint* new_class_data_len,
unsigned char** new_class_data) {
std::string name_str(name);
@@ -435,8 +435,8 @@
// Get all capabilities except those related to retransformation.
jint OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) {
printf("Unable to get jvmti env!\n");
return 1;
@@ -451,8 +451,8 @@
// Get all capabilities except those related to retransformation.
jint OnLoad(JavaVM* vm,
- char* options ATTRIBUTE_UNUSED,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) {
printf("Unable to get jvmti env!\n");
return 1;
diff --git a/test/ti-agent/suspend_event_helper.cc b/test/ti-agent/suspend_event_helper.cc
index cbc54d4..71b8681 100644
--- a/test/ti-agent/suspend_event_helper.cc
+++ b/test/ti-agent/suspend_event_helper.cc
@@ -113,8 +113,8 @@
JNIEnv* env,
jthread thr,
jmethodID method,
- jlocation location ATTRIBUTE_UNUSED,
- jobject exception ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jlocation location,
+ [[maybe_unused]] jobject exception) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -131,10 +131,10 @@
JNIEnv* env,
jthread thr,
jmethodID method,
- jlocation location ATTRIBUTE_UNUSED,
- jobject exception ATTRIBUTE_UNUSED,
- jmethodID catch_method ATTRIBUTE_UNUSED,
- jlocation catch_location ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jlocation location,
+ [[maybe_unused]] jobject exception,
+ [[maybe_unused]] jmethodID catch_method,
+ [[maybe_unused]] jlocation catch_location) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -164,8 +164,8 @@
JNIEnv* env,
jthread thr,
jmethodID method,
- jboolean was_popped_by_exception ATTRIBUTE_UNUSED,
- jvalue return_value ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jboolean was_popped_by_exception,
+ [[maybe_unused]] jvalue return_value) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -181,13 +181,13 @@
void JNICALL cbFieldModification(jvmtiEnv* jvmti,
JNIEnv* env,
jthread thr,
- jmethodID method ATTRIBUTE_UNUSED,
- jlocation location ATTRIBUTE_UNUSED,
- jclass field_klass ATTRIBUTE_UNUSED,
- jobject object ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jmethodID method,
+ [[maybe_unused]] jlocation location,
+ [[maybe_unused]] jclass field_klass,
+ [[maybe_unused]] jobject object,
jfieldID field,
- char signature_type ATTRIBUTE_UNUSED,
- jvalue new_value ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] char signature_type,
+ [[maybe_unused]] jvalue new_value) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -204,10 +204,10 @@
void JNICALL cbFieldAccess(jvmtiEnv* jvmti,
JNIEnv* env,
jthread thr,
- jmethodID method ATTRIBUTE_UNUSED,
- jlocation location ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jmethodID method,
+ [[maybe_unused]] jlocation location,
jclass field_klass,
- jobject object ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject object,
jfieldID field) {
TestData* data;
if (JvmtiErrorToException(
@@ -247,8 +247,8 @@
void JNICALL cbFramePop(jvmtiEnv* jvmti,
JNIEnv* env,
jthread thr,
- jmethodID method ATTRIBUTE_UNUSED,
- jboolean was_popped_by_exception ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jmethodID method,
+ [[maybe_unused]] jboolean was_popped_by_exception) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -281,7 +281,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupTest(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jclass klass) {
jvmtiCapabilities caps;
memset(&caps, 0, sizeof(caps));
// Most of these will already be there but might as well be complete.
@@ -374,7 +374,7 @@
extern "C" JNIEXPORT void JNICALL
Java_art_SuspendEvents_setupSuspendClassEvent(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jint event_num,
jobjectArray interesting_names,
jthread thr) {
@@ -409,7 +409,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendClassEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -432,7 +432,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendSingleStepAt(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject meth, jlocation loc, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject meth, jlocation loc, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -453,7 +453,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendSingleStepFor(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -470,7 +470,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendPopFrameEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint offset, jobject breakpoint_func, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jint offset, jobject breakpoint_func, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -501,7 +501,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendPopFrameEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -528,7 +528,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendBreakpointFor(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject meth, jlocation loc, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject meth, jlocation loc, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -553,7 +553,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendBreakpointFor(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -577,7 +577,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendExceptionEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method, jboolean is_catch, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method, jboolean is_catch, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -599,7 +599,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendExceptionEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -622,7 +622,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendMethodEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method, jboolean enter, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jobject method, jboolean enter, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -644,7 +644,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendMethodEvent(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -668,7 +668,7 @@
extern "C" JNIEXPORT void JNICALL
Java_art_SuspendEvents_setupFieldSuspendFor(JNIEnv* env,
- jclass klass ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass klass,
jclass target_klass,
jobject field,
jboolean access,
@@ -706,7 +706,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearFieldSuspendFor(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -744,7 +744,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupWaitForNativeCall(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -761,7 +761,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearWaitForNativeCall(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
@@ -775,7 +775,7 @@
}
extern "C" JNIEXPORT void JNICALL
-Java_art_SuspendEvents_waitForSuspendHit(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+Java_art_SuspendEvents_waitForSuspendHit(JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TestData* data;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) {
diff --git a/test/ti-agent/ti_utf.h b/test/ti-agent/ti_utf.h
index 15fe22c..cfde098 100644
--- a/test/ti-agent/ti_utf.h
+++ b/test/ti-agent/ti_utf.h
@@ -179,7 +179,7 @@
inline size_t CountModifiedUtf8BytesInUtf16(const uint16_t* chars, size_t char_count) {
// FIXME: We should not emit 4-byte sequences. Bug: 192935764
size_t result = 0;
- auto append = [&](char c ATTRIBUTE_UNUSED) { ++result; };
+ auto append = [&]([[maybe_unused]] char c) { ++result; };
ConvertUtf16ToUtf8</*kUseShortZero=*/ false,
/*kUse4ByteSequence=*/ true,
/*kReplaceBadSurrogates=*/ false>(chars, char_count, append);
diff --git a/test/ti-agent/trace_helper.cc b/test/ti-agent/trace_helper.cc
index 11e1c15..58958cb 100644
--- a/test/ti-agent/trace_helper.cc
+++ b/test/ti-agent/trace_helper.cc
@@ -303,7 +303,7 @@
static void classPrepareCB(jvmtiEnv* jvmti,
JNIEnv* jnienv,
- jthread thr ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jthread thr,
jclass klass) {
TraceData* data = nullptr;
if (JvmtiErrorToException(jnienv, jvmti,
@@ -441,7 +441,7 @@
extern "C" JNIEXPORT void JNICALL Java_art_Trace_watchFieldModification(
JNIEnv* env,
- jclass trace ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass trace,
jobject field_obj) {
jfieldID field;
jclass klass;
@@ -455,7 +455,7 @@
extern "C" JNIEXPORT void JNICALL Java_art_Trace_watchFieldAccess(
JNIEnv* env,
- jclass trace ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass trace,
jobject field_obj) {
jfieldID field;
jclass klass;
@@ -468,7 +468,7 @@
extern "C" JNIEXPORT void JNICALL Java_art_Trace_enableTracing2(
JNIEnv* env,
- jclass trace ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jclass trace,
jclass klass,
jobject enter,
jobject exit,
@@ -610,7 +610,7 @@
}
extern "C" JNIEXPORT void JNICALL Java_art_Trace_disableTracing(
- JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) {
+ JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) {
TraceData* data = nullptr;
if (JvmtiErrorToException(
env, jvmti_env, jvmti_env->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&data)))) {
diff --git a/test/ti-stress/stress.cc b/test/ti-stress/stress.cc
index cd7af10..4ba4564 100644
--- a/test/ti-stress/stress.cc
+++ b/test/ti-stress/stress.cc
@@ -611,11 +611,11 @@
// The hook we are using.
void JNICALL ClassFileLoadHookSecretNoOp(jvmtiEnv* jvmti,
- JNIEnv* jni_env ATTRIBUTE_UNUSED,
- jclass class_being_redefined ATTRIBUTE_UNUSED,
- jobject loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jni_env,
+ [[maybe_unused]] jclass class_being_redefined,
+ [[maybe_unused]] jobject loader,
const char* name,
- jobject protection_domain ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject protection_domain,
jint class_data_len,
const unsigned char* class_data,
jint* new_class_data_len,
@@ -679,7 +679,7 @@
// Do final setup during the VMInit callback. By this time most things are all setup.
static void JNICALL PerformFinalSetupVMInit(jvmtiEnv *jvmti_env,
JNIEnv* jni_env,
- jthread thread ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] jthread thread) {
// Load the VMClassLoader class. We will get a ClassNotFound exception because we don't have
// visibility but the class will be loaded behind the scenes.
LOG(INFO) << "manual load & initialization of class java/lang/VMClassLoader!";
@@ -754,7 +754,7 @@
extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
jvmtiEnv* jvmti = nullptr;
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti), JVMTI_VERSION_1_0)) {
LOG(ERROR) << "Unable to get jvmti env.";
diff --git a/tools/fuzzer/libart_verify_dex_fuzzer.cc b/tools/fuzzer/libart_verify_dex_fuzzer.cc
index 8c57da3..96a02ed 100644
--- a/tools/fuzzer/libart_verify_dex_fuzzer.cc
+++ b/tools/fuzzer/libart_verify_dex_fuzzer.cc
@@ -17,7 +17,7 @@
#include "base/mem_map.h"
#include "dex/dex_file_loader.h"
-extern "C" int LLVMFuzzerInitialize(int* argc ATTRIBUTE_UNUSED, char*** argv ATTRIBUTE_UNUSED) {
+extern "C" int LLVMFuzzerInitialize([[maybe_unused]] int* argc, [[maybe_unused]] char*** argv) {
// Initialize environment.
// TODO(solanes): `art::MemMap::Init` is not needed for the current DexFileLoader code path.
// Consider removing it once the fuzzer stabilizes and check that it is actually not needed.
diff --git a/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc b/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc
index 2f8b682..25bf794 100644
--- a/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc
+++ b/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc
@@ -36,7 +36,7 @@
std::vector<SingleBreakpointTarget> bps;
};
-static void VMInitCB(jvmtiEnv* jvmti, JNIEnv* env, jthread thr ATTRIBUTE_UNUSED) {
+static void VMInitCB(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jthread thr) {
BreakpointTargets* all_targets = nullptr;
jvmtiError err = jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&all_targets));
if (err != JVMTI_ERROR_NONE || all_targets == nullptr) {
@@ -350,7 +350,7 @@
static jint AgentStart(StartType start,
JavaVM* vm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
jvmtiEnv* jvmti = nullptr;
jvmtiError error = JVMTI_ERROR_NONE;
{
diff --git a/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc b/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc
index 71a0115..afa8d61 100644
--- a/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc
+++ b/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc
@@ -88,7 +88,9 @@
} // namespace
-static jint AgentStart(JavaVM* vm, char* options ATTRIBUTE_UNUSED, void* reserved ATTRIBUTE_UNUSED) {
+static jint AgentStart(JavaVM* vm,
+ [[maybe_unused]] char* options,
+ [[maybe_unused]] void* reserved) {
jvmtiEnv* jvmti = nullptr;
if (SetupJvmtiEnv(vm, &jvmti) != JNI_OK) {
LOG(ERROR) << "Could not get JVMTI env or ArtTiEnv!";
diff --git a/tools/jvmti-agents/enable-vlog/enablevlog.cc b/tools/jvmti-agents/enable-vlog/enablevlog.cc
index 7bee013..d42b0ff 100644
--- a/tools/jvmti-agents/enable-vlog/enablevlog.cc
+++ b/tools/jvmti-agents/enable-vlog/enablevlog.cc
@@ -89,7 +89,7 @@
} // namespace
-static jint AgentStart(JavaVM* vm, char* options, void* reserved ATTRIBUTE_UNUSED) {
+static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) {
jvmtiEnv* jvmti = nullptr;
if (SetupJvmtiEnv(vm, &jvmti) != JNI_OK) {
LOG(ERROR) << "Could not get JVMTI env or ArtTiEnv!";
diff --git a/tools/jvmti-agents/field-counts/fieldcount.cc b/tools/jvmti-agents/field-counts/fieldcount.cc
index 5a4b00e..526d68f 100644
--- a/tools/jvmti-agents/field-counts/fieldcount.cc
+++ b/tools/jvmti-agents/field-counts/fieldcount.cc
@@ -195,7 +195,7 @@
}
}
-static void VMDeathCb(jvmtiEnv* jvmti, JNIEnv* env ATTRIBUTE_UNUSED) {
+static void VMDeathCb(jvmtiEnv* jvmti, [[maybe_unused]] JNIEnv* env) {
DataDumpRequestCb(jvmti);
RequestList* list = nullptr;
CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&list)));
@@ -211,7 +211,7 @@
CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(list));
}
-static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, jobject thr ATTRIBUTE_UNUSED) {
+static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jobject thr) {
char* args = nullptr;
CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&args)));
CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(nullptr));
@@ -260,14 +260,14 @@
// Late attachment (e.g. 'am attach-agent').
extern "C" JNIEXPORT jint JNICALL Agent_OnAttach(JavaVM* vm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
return AgentStart(vm, options, /*is_onload=*/false);
}
// Early attachment
extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* jvm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
return AgentStart(jvm, options, /*is_onload=*/true);
}
diff --git a/tools/jvmti-agents/field-null-percent/fieldnull.cc b/tools/jvmti-agents/field-null-percent/fieldnull.cc
index 016164f..b2cd13b 100644
--- a/tools/jvmti-agents/field-null-percent/fieldnull.cc
+++ b/tools/jvmti-agents/field-null-percent/fieldnull.cc
@@ -140,7 +140,7 @@
}
}
-static void VMDeathCb(jvmtiEnv* jvmti, JNIEnv* env ATTRIBUTE_UNUSED) {
+static void VMDeathCb(jvmtiEnv* jvmti, [[maybe_unused]] JNIEnv* env) {
DataDumpRequestCb(jvmti);
RequestList* list = nullptr;
CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&list)));
@@ -154,7 +154,7 @@
CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(list));
}
-static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, jobject thr ATTRIBUTE_UNUSED) {
+static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jobject thr) {
char* args = nullptr;
CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&args)));
CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(nullptr));
@@ -201,16 +201,16 @@
}
// Late attachment (e.g. 'am attach-agent').
-extern "C" JNIEXPORT jint JNICALL Agent_OnAttach(JavaVM *vm,
+extern "C" JNIEXPORT jint JNICALL Agent_OnAttach(JavaVM* vm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
return AgentStart(vm, options, /*is_onload=*/false);
}
// Early attachment
extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* jvm,
char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
return AgentStart(jvm, options, /*is_onload=*/true);
}
diff --git a/tools/jvmti-agents/jit-load/jitload.cc b/tools/jvmti-agents/jit-load/jitload.cc
index 6ef7b67..f5d6ff4 100644
--- a/tools/jvmti-agents/jit-load/jitload.cc
+++ b/tools/jvmti-agents/jit-load/jitload.cc
@@ -51,8 +51,8 @@
}
JNICALL void VmInitCb(jvmtiEnv* jvmti,
- JNIEnv* env ATTRIBUTE_UNUSED,
- jthread curthread ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jthread curthread) {
jthread jit_thread = GetJitThread();
if (jit_thread != nullptr) {
CHECK_EQ(jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_PREPARE, jit_thread),
@@ -72,8 +72,8 @@
}
JNICALL void ClassPrepareJit(jvmtiEnv* jvmti,
- JNIEnv* jni_env ATTRIBUTE_UNUSED,
- jthread thr ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jni_env,
+ [[maybe_unused]] jthread thr,
jclass klass) {
AgentOptions* ops;
CHECK_CALL_SUCCESS(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&ops)));
@@ -85,9 +85,7 @@
CHECK_CALL_SUCCESS(jvmti->Deallocate(reinterpret_cast<unsigned char*>(klass_name)));
}
-JNICALL void VMDeathCb(jvmtiEnv* jvmti, JNIEnv* env ATTRIBUTE_UNUSED) {
- DataDumpRequestCb(jvmti);
-}
+JNICALL void VMDeathCb(jvmtiEnv* jvmti, [[maybe_unused]] JNIEnv* env) { DataDumpRequestCb(jvmti); }
static jvmtiEnv* SetupJvmti(JavaVM* vm, const char* options) {
android::base::InitLogging(/* argv= */nullptr);
diff --git a/tools/jvmti-agents/list-extensions/list-extensions.cc b/tools/jvmti-agents/list-extensions/list-extensions.cc
index 6d8237a..cce42e4 100644
--- a/tools/jvmti-agents/list-extensions/list-extensions.cc
+++ b/tools/jvmti-agents/list-extensions/list-extensions.cc
@@ -147,7 +147,7 @@
return JNI_OK;
}
-jint AgentStart(JavaVM* vm, char* options ATTRIBUTE_UNUSED, void* reserved ATTRIBUTE_UNUSED) {
+jint AgentStart(JavaVM* vm, [[maybe_unused]] char* options, [[maybe_unused]] void* reserved) {
if (SetupJvmtiEnv(vm) != JNI_OK) {
LOG(ERROR) << "Could not get JVMTI env or ArtTiEnv!";
return JNI_ERR;
diff --git a/tools/jvmti-agents/simple-force-redefine/forceredefine.cc b/tools/jvmti-agents/simple-force-redefine/forceredefine.cc
index 3474238..72e5fe0 100644
--- a/tools/jvmti-agents/simple-force-redefine/forceredefine.cc
+++ b/tools/jvmti-agents/simple-force-redefine/forceredefine.cc
@@ -134,11 +134,11 @@
}
static void CbClassFileLoadHook(jvmtiEnv* jvmti,
- JNIEnv* env ATTRIBUTE_UNUSED,
- jclass classBeingRedefined ATTRIBUTE_UNUSED,
- jobject loader ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* env,
+ [[maybe_unused]] jclass classBeingRedefined,
+ [[maybe_unused]] jobject loader,
const char* name,
- jobject protectionDomain ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject protectionDomain,
jint classDataLen,
const unsigned char* classData,
jint* newClassDataLen,
@@ -212,7 +212,7 @@
env->DeleteLocalRef(klass);
}
-static void AgentMain(jvmtiEnv* jvmti, JNIEnv* jni, void* arg ATTRIBUTE_UNUSED) {
+static void AgentMain(jvmtiEnv* jvmti, JNIEnv* jni, [[maybe_unused]] void* arg) {
AgentInfo* ai = GetAgentInfo(jvmti);
std::string klass_name;
jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_FILE_LOAD_HOOK, nullptr);
@@ -227,7 +227,7 @@
}
}
-static void CbVmInit(jvmtiEnv* jvmti, JNIEnv* env, jthread thr ATTRIBUTE_UNUSED) {
+static void CbVmInit(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jthread thr) {
// Create a Thread object.
ScopedLocalRef<jobject> thread_name(env, env->NewStringUTF("Agent Thread"));
if (thread_name.get() == nullptr) {
@@ -263,7 +263,7 @@
} // namespace
template <bool kIsOnLoad>
-static jint AgentStart(JavaVM* vm, char* options, void* reserved ATTRIBUTE_UNUSED) {
+static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) {
jvmtiEnv* jvmti = nullptr;
if (vm->GetEnv(reinterpret_cast<void**>(&jvmti), JVMTI_VERSION_1_1) != JNI_OK ||
diff --git a/tools/jvmti-agents/simple-profile/simple_profile.cc b/tools/jvmti-agents/simple-profile/simple_profile.cc
index 7161142..9ea99d5 100644
--- a/tools/jvmti-agents/simple-profile/simple_profile.cc
+++ b/tools/jvmti-agents/simple-profile/simple_profile.cc
@@ -192,7 +192,7 @@
CHECK_JVMTI(jvmti->RunAgentThread(
thread.get(),
- [](jvmtiEnv* jvmti, JNIEnv* jni, void* unused_data ATTRIBUTE_UNUSED) {
+ [](jvmtiEnv* jvmti, JNIEnv* jni, [[maybe_unused]] void* unused_data) {
SimpleProfileData* data = SimpleProfileData::GetProfileData(jvmti);
data->RunDumpLoop(jvmti, jni);
},
@@ -354,7 +354,7 @@
static void MethodEntryCB(jvmtiEnv* jvmti_env,
JNIEnv* env,
- jthread thread ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jthread thread,
jmethodID method) {
SimpleProfileData* data = SimpleProfileData::GetProfileData(jvmti_env);
data->Enter(jvmti_env, env, method);
@@ -418,7 +418,7 @@
static jint AgentStart(StartType start,
JavaVM* vm,
const char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+ [[maybe_unused]] void* reserved) {
if (options == nullptr) {
options = "";
}
@@ -476,7 +476,7 @@
callbacks.VMInit = &VMInitCB;
callbacks.DataDumpRequest = &DataDumpCb;
callbacks.VMDeath = &VMDeathCB;
- callbacks.ThreadEnd = [](jvmtiEnv* env, JNIEnv* jni, jthread thr ATTRIBUTE_UNUSED) {
+ callbacks.ThreadEnd = [](jvmtiEnv* env, JNIEnv* jni, [[maybe_unused]] jthread thr) {
VMDeathCB(env, jni);
};
diff --git a/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc b/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc
index d719db5..1b359b8 100644
--- a/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc
+++ b/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc
@@ -310,7 +310,7 @@
static void JNICALL logVMObjectAlloc(jvmtiEnv* jvmti,
JNIEnv* jni,
jthread thread,
- jobject obj ATTRIBUTE_UNUSED,
+ [[maybe_unused]] jobject obj,
jclass klass,
jlong size) {
// Sample only once out of sampling_rate tries, and prevent recursive allocation tracking,
@@ -407,9 +407,7 @@
return true;
}
-static jint AgentStart(JavaVM* vm,
- char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) {
// Handle the sampling rate, depth limit, and output path, if set.
if (!ProcessOptions(options)) {
return JNI_ERR;
diff --git a/tools/jvmti-agents/ti-fast/tifast.cc b/tools/jvmti-agents/ti-fast/tifast.cc
index bb49aa1..4c182c8 100644
--- a/tools/jvmti-agents/ti-fast/tifast.cc
+++ b/tools/jvmti-agents/ti-fast/tifast.cc
@@ -37,7 +37,8 @@
// env.
static constexpr jint kArtTiVersion = JVMTI_VERSION_1_2 | 0x40000000;
-template <typename ...Args> static void Unused(Args... args ATTRIBUTE_UNUSED) {}
+template <typename... Args>
+static void Unused([[maybe_unused]] Args... args) {}
// jthread is a typedef of jobject so we use this to allow the templates to distinguish them.
struct jthreadContainer { jthread thread; };
@@ -407,7 +408,8 @@
};
// Base case
-template<> void LogPrinter::PrintRest(jvmtiEnv* jvmti ATTRIBUTE_UNUSED, JNIEnv* jni) {
+template <>
+void LogPrinter::PrintRest([[maybe_unused]] jvmtiEnv* jvmti, JNIEnv* jni) {
if (jni == nullptr) {
start_args = "jvmtiEnv*";
} else {
@@ -668,9 +670,7 @@
} // namespace
-static jint AgentStart(JavaVM* vm,
- char* options,
- void* reserved ATTRIBUTE_UNUSED) {
+static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) {
jvmtiEnv* jvmti = nullptr;
jvmtiError error = JVMTI_ERROR_NONE;
if (SetupJvmtiEnv(vm, &jvmti) != JNI_OK) {
diff --git a/tools/jvmti-agents/titrace/titrace.cc b/tools/jvmti-agents/titrace/titrace.cc
index d9fab25..7178455 100644
--- a/tools/jvmti-agents/titrace/titrace.cc
+++ b/tools/jvmti-agents/titrace/titrace.cc
@@ -207,8 +207,8 @@
struct EventCallbacks {
static void SingleStep(jvmtiEnv* jvmti_env,
- JNIEnv* jni_env ATTRIBUTE_UNUSED,
- jthread thread ATTRIBUTE_UNUSED,
+ [[maybe_unused]] JNIEnv* jni_env,
+ [[maybe_unused]] jthread thread,
jmethodID method,
jlocation location) {
TraceStatistics& stats = TraceStatistics::GetSingleton();
@@ -218,7 +218,7 @@
// Use "kill -SIGQUIT" to generate a data dump request.
// Useful when running an android app since it doesn't go through
// a normal Agent_OnUnload.
- static void DataDumpRequest(jvmtiEnv* jvmti_env ATTRIBUTE_UNUSED) {
+ static void DataDumpRequest([[maybe_unused]] jvmtiEnv* jvmti_env) {
TraceStatistics& stats = TraceStatistics::GetSingleton();
stats.Log();
}
@@ -305,10 +305,9 @@
// Note: This is not called for normal Android apps,
// use "kill -SIGQUIT" instead to generate a data dump request.
-JNIEXPORT void JNICALL Agent_OnUnload(JavaVM* vm ATTRIBUTE_UNUSED) {
+JNIEXPORT void JNICALL Agent_OnUnload([[maybe_unused]] JavaVM* vm) {
using namespace titrace; // NOLINT [build/namespaces] [5]
LOG(INFO) << "Agent_OnUnload: Goodbye";
TraceStatistics::GetSingleton().Log();
}
-
diff --git a/tools/signal_dumper/signal_dumper.cc b/tools/signal_dumper/signal_dumper.cc
index bedb8dc..ebbe6ad 100644
--- a/tools/signal_dumper/signal_dumper.cc
+++ b/tools/signal_dumper/signal_dumper.cc
@@ -657,7 +657,7 @@
} // namespace
} // namespace art
-int main(int argc ATTRIBUTE_UNUSED, char** argv) {
+int main([[maybe_unused]] int argc, char** argv) {
android::base::InitLogging(argv);
int signal = SIGRTMIN + 2;
diff --git a/tools/tracefast-plugin/tracefast.cc b/tools/tracefast-plugin/tracefast.cc
index abc871d..c9c135a 100644
--- a/tools/tracefast-plugin/tracefast.cc
+++ b/tools/tracefast-plugin/tracefast.cc
@@ -43,73 +43,73 @@
public:
Tracer() {}
- void MethodEntered(art::Thread* thread ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED) override
+ void MethodEntered([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::ArtMethod* method) override
REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void MethodExited(art::Thread* thread ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- art::instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED,
- art::MutableHandle<art::mirror::Object>& return_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void MethodExited([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] art::instrumentation::OptionalFrame frame,
+ [[maybe_unused]] art::MutableHandle<art::mirror::Object>& return_value) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void MethodExited(art::Thread* thread ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- art::instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED,
- art::JValue& return_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void MethodExited([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] art::instrumentation::OptionalFrame frame,
+ [[maybe_unused]] art::JValue& return_value) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void MethodUnwind(art::Thread* thread ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void MethodUnwind([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void DexPcMoved(art::Thread* thread ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t new_dex_pc ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void DexPcMoved([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::Handle<art::mirror::Object> this_object,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t new_dex_pc) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void FieldRead(art::Thread* thread ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- art::ArtField* field ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void FieldRead([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::Handle<art::mirror::Object> this_object,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] art::ArtField* field) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void FieldWritten(art::Thread* thread ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- art::ArtField* field ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Object> field_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void FieldWritten([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::Handle<art::mirror::Object> this_object,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] art::ArtField* field,
+ [[maybe_unused]] art::Handle<art::mirror::Object> field_value) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void FieldWritten(art::Thread* thread ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- art::ArtField* field ATTRIBUTE_UNUSED,
- const art::JValue& field_value ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void FieldWritten([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::Handle<art::mirror::Object> this_object,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] art::ArtField* field,
+ [[maybe_unused]] const art::JValue& field_value) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void ExceptionThrown(art::Thread* thread ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Throwable> exception_object ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void ExceptionThrown([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::Handle<art::mirror::Throwable> exception_object)
+ override REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void ExceptionHandled(art::Thread* self ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Throwable> throwable ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void ExceptionHandled([[maybe_unused]] art::Thread* self,
+ [[maybe_unused]] art::Handle<art::mirror::Throwable> throwable) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void Branch(art::Thread* thread ATTRIBUTE_UNUSED,
- art::ArtMethod* method ATTRIBUTE_UNUSED,
- uint32_t dex_pc ATTRIBUTE_UNUSED,
- int32_t dex_pc_offset ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void Branch([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] art::ArtMethod* method,
+ [[maybe_unused]] uint32_t dex_pc,
+ [[maybe_unused]] int32_t dex_pc_offset) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
- void WatchedFramePop(art::Thread* thread ATTRIBUTE_UNUSED,
- const art::ShadowFrame& frame ATTRIBUTE_UNUSED)
- override REQUIRES_SHARED(art::Locks::mutator_lock_) { }
+ void WatchedFramePop([[maybe_unused]] art::Thread* thread,
+ [[maybe_unused]] const art::ShadowFrame& frame) override
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {}
private:
DISALLOW_COPY_AND_ASSIGN(Tracer);
diff --git a/tools/veridex/flow_analysis.cc b/tools/veridex/flow_analysis.cc
index 6a4d351..f30eb09 100644
--- a/tools/veridex/flow_analysis.cc
+++ b/tools/veridex/flow_analysis.cc
@@ -760,7 +760,7 @@
}
}
-void FlowAnalysisCollector::AnalyzeFieldSet(const Instruction& instruction ATTRIBUTE_UNUSED) {
+void FlowAnalysisCollector::AnalyzeFieldSet([[maybe_unused]] const Instruction& instruction) {
// There are no fields that escape reflection uses.
}
@@ -792,7 +792,7 @@
return GetReturnType(id);
}
-void FlowAnalysisSubstitutor::AnalyzeFieldSet(const Instruction& instruction ATTRIBUTE_UNUSED) {
+void FlowAnalysisSubstitutor::AnalyzeFieldSet([[maybe_unused]] const Instruction& instruction) {
// TODO: analyze field sets.
}