| author    | 2023-05-16 10:32:54 +0000 |
|-----------|---------------------------|
| committer | 2023-05-22 10:36:39 +0000 |
| commit    | 78f3c72e8948087352788997a70854dee613352c |
| tree      | 306db3c15bc4b7af149bcf3e07be533e50679b88 |
| parent    | dc771261232c2ff702373f396a5a7fe586e2f0a6 |
Use C++17's [[maybe_unused]] attribute in ART
Bug: 169680875
Test: mmm art
Change-Id: Ic0cc320891c42b07a2b5520a584d2b62052e7235
301 files changed, 2182 insertions, 2180 deletions
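The change is mechanical: each `parameter ATTRIBUTE_UNUSED` annotation becomes `[[maybe_unused]] parameter`, with the attribute moved in front of the declaration it applies to, and clang-format then rewraps the affected signatures, which accounts for much of the churn in the diff below. The following is a minimal sketch, not taken from the patch, assuming `ATTRIBUTE_UNUSED` is ART's wrapper around the GCC/Clang `__attribute__((unused))` extension; the helper names are illustrative only:

```cpp
// Sketch only: an assumed definition of the macro being retired, wrapping the
// compiler-specific attribute it is presumed to expand to in ART's macros header.
#define ATTRIBUTE_UNUSED __attribute__((__unused__))

// Old style: the macro trails the parameter name.
static int CountArgsOld(int argc ATTRIBUTE_UNUSED, char** argv) {
  int n = 0;
  while (argv[n] != nullptr) ++n;  // Count argv entries instead of trusting argc.
  return n;
}

// New style: the standard C++17 attribute precedes the parameter declaration,
// which is the form this change applies throughout ART.
static int CountArgsNew([[maybe_unused]] int argc, char** argv) {
  int n = 0;
  while (argv[n] != nullptr) ++n;
  return n;
}

int main(int argc, char** argv) {
  return CountArgsOld(argc, argv) == CountArgsNew(argc, argv) ? 0 : 1;
}
```

Both spellings suppress `-Wunused-parameter` on GCC and Clang and have no effect on generated code; the attribute form is standard C++17 and needs no compiler-specific macro, which is presumably the motivation for this tree-wide cleanup.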
diff --git a/artd/artd_main.cc b/artd/artd_main.cc index 3644eba9a2..9fe5bd81b4 100644 --- a/artd/artd_main.cc +++ b/artd/artd_main.cc @@ -22,7 +22,7 @@ #include "android/binder_process.h" #include "artd.h" -int main(int argc ATTRIBUTE_UNUSED, char* argv[]) { +int main([[maybe_unused]] int argc, char* argv[]) { android::base::InitLogging(argv); auto artd = ndk::SharedRefBase::make<art::artd::Artd>(); diff --git a/benchmark/golem-tiagent/golem-tiagent.cc b/benchmark/golem-tiagent/golem-tiagent.cc index be2c72795b..9fe06442b4 100644 --- a/benchmark/golem-tiagent/golem-tiagent.cc +++ b/benchmark/golem-tiagent/golem-tiagent.cc @@ -35,9 +35,9 @@ void CheckJvmtiError(jvmtiEnv* env, jvmtiError error) { } } -static void JNICALL VMInitCallback(jvmtiEnv *jenv ATTRIBUTE_UNUSED, +static void JNICALL VMInitCallback([[maybe_unused]] jvmtiEnv* jenv, JNIEnv* jni_env, - jthread thread ATTRIBUTE_UNUSED) { + [[maybe_unused]] jthread thread) { // Set a breakpoint on a rare method that we won't expect to be hit. // java.lang.Thread.stop is deprecated and not expected to be used. jclass cl = jni_env->FindClass("java/lang/Thread"); @@ -57,8 +57,8 @@ static void JNICALL VMInitCallback(jvmtiEnv *jenv ATTRIBUTE_UNUSED, } extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { // Setup jvmti_env if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0) != 0) { LOG(ERROR) << "Unable to get jvmti env!"; diff --git a/cmdline/cmdline.h b/cmdline/cmdline.h index b8ca7d0efc..2108a7a172 100644 --- a/cmdline/cmdline.h +++ b/cmdline/cmdline.h @@ -321,15 +321,13 @@ struct CmdlineArgs { } protected: - virtual ParseStatus ParseCustom(const char* raw_option ATTRIBUTE_UNUSED, - size_t raw_option_length ATTRIBUTE_UNUSED, - std::string* error_msg ATTRIBUTE_UNUSED) { + virtual ParseStatus ParseCustom([[maybe_unused]] const char* raw_option, + [[maybe_unused]] size_t raw_option_length, + [[maybe_unused]] std::string* error_msg) { return kParseUnknownArgument; } - virtual ParseStatus ParseChecks(std::string* error_msg ATTRIBUTE_UNUSED) { - return kParseOk; - } + virtual ParseStatus ParseChecks([[maybe_unused]] std::string* error_msg) { return kParseOk; } }; template <typename Args = CmdlineArgs> diff --git a/cmdline/cmdline_parser_test.cc b/cmdline/cmdline_parser_test.cc index effbee93c7..4d53857a09 100644 --- a/cmdline/cmdline_parser_test.cc +++ b/cmdline/cmdline_parser_test.cc @@ -75,12 +75,13 @@ namespace art { // that are nevertheless equal. // If a test is failing because the structs aren't "equal" when they really are // then it's recommended to implement operator== for it instead. - template <typename T, typename ... Ignore> - bool UsuallyEquals(const T& expected, const T& actual, - const Ignore& ... more ATTRIBUTE_UNUSED, - typename std::enable_if<std::is_pod<T>::value>::type* = nullptr, - typename std::enable_if<!detail::SupportsEqualityOperator<T>::value>::type* = nullptr - ) { + template <typename T, typename... Ignore> + bool UsuallyEquals( + const T& expected, + const T& actual, + [[maybe_unused]] const Ignore&... 
more, + typename std::enable_if<std::is_pod<T>::value>::type* = nullptr, + typename std::enable_if<!detail::SupportsEqualityOperator<T>::value>::type* = nullptr) { return memcmp(std::addressof(expected), std::addressof(actual), sizeof(T)) == 0; } diff --git a/cmdline/cmdline_type_parser.h b/cmdline/cmdline_type_parser.h index 82a76f4ad9..10e28f3f59 100644 --- a/cmdline/cmdline_type_parser.h +++ b/cmdline/cmdline_type_parser.h @@ -34,7 +34,7 @@ struct CmdlineTypeParser { // // e.g. if the argument definition was "foo:_", and the user-provided input was "foo:bar", // then args is "bar". - Result Parse(const std::string& args ATTRIBUTE_UNUSED) { + Result Parse([[maybe_unused]] const std::string& args) { assert(false); return Result::Failure("Missing type specialization and/or value map"); } @@ -46,8 +46,8 @@ struct CmdlineTypeParser { // // If the initial value does not exist yet, a default value is created by // value-initializing with 'T()'. - Result ParseAndAppend(const std::string& args ATTRIBUTE_UNUSED, - T& existing_value ATTRIBUTE_UNUSED) { + Result ParseAndAppend([[maybe_unused]] const std::string& args, + [[maybe_unused]] T& existing_value) { assert(false); return Result::Failure("Missing type specialization and/or value map"); } diff --git a/cmdline/token_range.h b/cmdline/token_range.h index e917e1d6d0..f662ca6df9 100644 --- a/cmdline/token_range.h +++ b/cmdline/token_range.h @@ -55,7 +55,7 @@ struct TokenRange { #if 0 // Copying-from-vector constructor. - TokenRange(const TokenList& token_list ATTRIBUTE_UNUSED, + TokenRange([[maybe_unused]] const TokenList& token_list, TokenList::const_iterator it_begin, TokenList::const_iterator it_end) : token_list_(new TokenList(it_begin, it_end)), diff --git a/compiler/common_compiler_test.cc b/compiler/common_compiler_test.cc index 442b96e5fa..a37f516759 100644 --- a/compiler/common_compiler_test.cc +++ b/compiler/common_compiler_test.cc @@ -133,9 +133,9 @@ class CommonCompilerTestImpl::OneCompiledMethodStorage final : public CompiledCo CompiledMethod* CreateCompiledMethod(InstructionSet instruction_set, ArrayRef<const uint8_t> code, ArrayRef<const uint8_t> stack_map, - ArrayRef<const uint8_t> cfi ATTRIBUTE_UNUSED, + [[maybe_unused]] ArrayRef<const uint8_t> cfi, ArrayRef<const linker::LinkerPatch> patches, - bool is_intrinsic ATTRIBUTE_UNUSED) override { + [[maybe_unused]] bool is_intrinsic) override { // Supports only one method at a time. 
CHECK_EQ(instruction_set_, InstructionSet::kNone); CHECK_NE(instruction_set, InstructionSet::kNone); @@ -150,15 +150,15 @@ class CommonCompilerTestImpl::OneCompiledMethodStorage final : public CompiledCo return reinterpret_cast<CompiledMethod*>(this); } - ArrayRef<const uint8_t> GetThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED, - /*out*/ std::string* debug_name ATTRIBUTE_UNUSED) override { + ArrayRef<const uint8_t> GetThunkCode([[maybe_unused]] const linker::LinkerPatch& patch, + [[maybe_unused]] /*out*/ std::string* debug_name) override { LOG(FATAL) << "Unsupported."; UNREACHABLE(); } - void SetThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED, - ArrayRef<const uint8_t> code ATTRIBUTE_UNUSED, - const std::string& debug_name ATTRIBUTE_UNUSED) override { + void SetThunkCode([[maybe_unused]] const linker::LinkerPatch& patch, + [[maybe_unused]] ArrayRef<const uint8_t> code, + [[maybe_unused]] const std::string& debug_name) override { LOG(FATAL) << "Unsupported."; UNREACHABLE(); } diff --git a/compiler/compiler.h b/compiler/compiler.h index ce785bb769..6c317f7e02 100644 --- a/compiler/compiler.h +++ b/compiler/compiler.h @@ -73,12 +73,12 @@ class Compiler { const DexFile& dex_file, Handle<mirror::DexCache> dex_cache) const = 0; - virtual bool JitCompile(Thread* self ATTRIBUTE_UNUSED, - jit::JitCodeCache* code_cache ATTRIBUTE_UNUSED, - jit::JitMemoryRegion* region ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - CompilationKind compilation_kind ATTRIBUTE_UNUSED, - jit::JitLogger* jit_logger ATTRIBUTE_UNUSED) + virtual bool JitCompile([[maybe_unused]] Thread* self, + [[maybe_unused]] jit::JitCodeCache* code_cache, + [[maybe_unused]] jit::JitMemoryRegion* region, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] CompilationKind compilation_kind, + [[maybe_unused]] jit::JitLogger* jit_logger) REQUIRES_SHARED(Locks::mutator_lock_) { return false; } diff --git a/compiler/debug/elf_debug_writer.cc b/compiler/debug/elf_debug_writer.cc index 8f64d73aa7..505b6c5d8a 100644 --- a/compiler/debug/elf_debug_writer.cc +++ b/compiler/debug/elf_debug_writer.cc @@ -113,7 +113,7 @@ void WriteDebugInfo(ElfBuilder<ElfTypes>* builder, template <typename ElfTypes> static std::vector<uint8_t> MakeMiniDebugInfoInternal( InstructionSet isa, - const InstructionSetFeatures* features ATTRIBUTE_UNUSED, + [[maybe_unused]] const InstructionSetFeatures* features, typename ElfTypes::Addr text_section_address, size_t text_section_size, typename ElfTypes::Addr dex_section_address, @@ -172,11 +172,10 @@ std::vector<uint8_t> MakeMiniDebugInfo( } } -std::vector<uint8_t> MakeElfFileForJIT( - InstructionSet isa, - const InstructionSetFeatures* features ATTRIBUTE_UNUSED, - bool mini_debug_info, - const MethodDebugInfo& method_info) { +std::vector<uint8_t> MakeElfFileForJIT(InstructionSet isa, + [[maybe_unused]] const InstructionSetFeatures* features, + bool mini_debug_info, + const MethodDebugInfo& method_info) { using ElfTypes = ElfRuntimeTypes; CHECK_EQ(sizeof(ElfTypes::Addr), static_cast<size_t>(GetInstructionSetPointerSize(isa))); CHECK_EQ(method_info.is_code_address_text_relative, false); @@ -213,13 +212,12 @@ std::vector<uint8_t> MakeElfFileForJIT( DCHECK_EQ(sym.st_size, method_info.code_size); num_syms++; }); - reader.VisitDebugFrame([&](const Reader::CIE* cie ATTRIBUTE_UNUSED) { - num_cies++; - }, [&](const Reader::FDE* fde, const Reader::CIE* cie ATTRIBUTE_UNUSED) { - DCHECK_EQ(fde->sym_addr, method_info.code_address); - DCHECK_EQ(fde->sym_size, method_info.code_size); - num_fdes++; - 
}); + reader.VisitDebugFrame([&]([[maybe_unused]] const Reader::CIE* cie) { num_cies++; }, + [&](const Reader::FDE* fde, [[maybe_unused]] const Reader::CIE* cie) { + DCHECK_EQ(fde->sym_addr, method_info.code_address); + DCHECK_EQ(fde->sym_size, method_info.code_size); + num_fdes++; + }); DCHECK_EQ(num_syms, 1u); DCHECK_LE(num_cies, 1u); DCHECK_LE(num_fdes, 1u); @@ -302,18 +300,20 @@ std::vector<uint8_t> PackElfFileForJIT( // ART always produces the same CIE, so we copy the first one and ignore the rest. bool copied_cie = false; for (Reader& reader : readers) { - reader.VisitDebugFrame([&](const Reader::CIE* cie) { - if (!copied_cie) { - debug_frame->WriteFully(cie->data(), cie->size()); - copied_cie = true; - } - }, [&](const Reader::FDE* fde, const Reader::CIE* cie ATTRIBUTE_UNUSED) { - DCHECK(copied_cie); - DCHECK_EQ(fde->cie_pointer, 0); - if (!is_removed_symbol(fde->sym_addr)) { - debug_frame->WriteFully(fde->data(), fde->size()); - } - }); + reader.VisitDebugFrame( + [&](const Reader::CIE* cie) { + if (!copied_cie) { + debug_frame->WriteFully(cie->data(), cie->size()); + copied_cie = true; + } + }, + [&](const Reader::FDE* fde, [[maybe_unused]] const Reader::CIE* cie) { + DCHECK(copied_cie); + DCHECK_EQ(fde->cie_pointer, 0); + if (!is_removed_symbol(fde->sym_addr)) { + debug_frame->WriteFully(fde->data(), fde->size()); + } + }); } debug_frame->End(); @@ -348,9 +348,8 @@ std::vector<uint8_t> PackElfFileForJIT( std::vector<uint8_t> WriteDebugElfFileForClasses( InstructionSet isa, - const InstructionSetFeatures* features ATTRIBUTE_UNUSED, - const ArrayRef<mirror::Class*>& types) - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] const InstructionSetFeatures* features, + const ArrayRef<mirror::Class*>& types) REQUIRES_SHARED(Locks::mutator_lock_) { using ElfTypes = ElfRuntimeTypes; CHECK_EQ(sizeof(ElfTypes::Addr), static_cast<size_t>(GetInstructionSetPointerSize(isa))); std::vector<uint8_t> buffer; diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc index 2fd7a6b535..b99e78f2a2 100644 --- a/compiler/jni/jni_compiler_test.cc +++ b/compiler/jni/jni_compiler_test.cc @@ -175,9 +175,8 @@ size_t count_nonnull_refs_single_helper(T arg, // SFINAE for non-ref-types. Always 0. 
template <typename T> -size_t count_nonnull_refs_single_helper(T arg ATTRIBUTE_UNUSED, - typename std::enable_if<!jni_type_traits<T>::is_ref>::type* - = nullptr) { +size_t count_nonnull_refs_single_helper( + [[maybe_unused]] T arg, typename std::enable_if<!jni_type_traits<T>::is_ref>::type* = nullptr) { return 0; } @@ -597,10 +596,9 @@ struct ScopedCheckHandleScope { class CountReferencesVisitor : public RootVisitor { public: - void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED, + void VisitRoots([[maybe_unused]] mirror::Object*** roots, size_t count, - const RootInfo& info) override - REQUIRES_SHARED(Locks::mutator_lock_) { + const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) { if (info.GetType() == art::RootType::kRootJavaFrame) { const JavaFrameRootInfo& jrfi = static_cast<const JavaFrameRootInfo&>(info); if (jrfi.GetVReg() == JavaFrameRootInfo::kNativeReferenceArgument) { @@ -610,10 +608,9 @@ class CountReferencesVisitor : public RootVisitor { } } - void VisitRoots(mirror::CompressedReference<mirror::Object>** roots ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED, - const RootInfo& info) override - REQUIRES_SHARED(Locks::mutator_lock_) { + void VisitRoots([[maybe_unused]] mirror::CompressedReference<mirror::Object>** roots, + [[maybe_unused]] size_t count, + const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) { CHECK_NE(info.GetType(), art::RootType::kRootJavaFrame); } @@ -986,8 +983,8 @@ void JniCompilerTest::CompileAndRunIntObjectObjectMethodImpl() { JNI_TEST(CompileAndRunIntObjectObjectMethod) int gJava_MyClassNatives_fooSII_calls[kJniKindCount] = {}; -jint Java_MyClassNatives_fooSII(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED, +jint Java_MyClassNatives_fooSII([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass, jint x, jint y) { gJava_MyClassNatives_fooSII_calls[gCurrentJni]++; @@ -1009,8 +1006,8 @@ void JniCompilerTest::CompileAndRunStaticIntIntMethodImpl() { JNI_TEST_CRITICAL(CompileAndRunStaticIntIntMethod) int gJava_MyClassNatives_fooSDD_calls[kJniKindCount] = {}; -jdouble Java_MyClassNatives_fooSDD(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED, +jdouble Java_MyClassNatives_fooSDD([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass, jdouble x, jdouble y) { gJava_MyClassNatives_fooSDD_calls[gCurrentJni]++; @@ -1682,8 +1679,8 @@ void JniCompilerTest::CompileAndRunFloatFloatMethodImpl() { JNI_TEST(CompileAndRunFloatFloatMethod) -void Java_MyClassNatives_checkParameterAlign(JNIEnv* env ATTRIBUTE_UNUSED, - jobject thisObj ATTRIBUTE_UNUSED, +void Java_MyClassNatives_checkParameterAlign([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jobject thisObj, jint i1, jlong l1) { EXPECT_EQ(i1, 1234); diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc index 9d0761d2f7..0f981dd6df 100644 --- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc +++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc @@ -117,7 +117,7 @@ ArrayRef<const ManagedRegister> X86_64JniCallingConvention::ArgumentScratchRegis return scratch_regs; } -static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni ATTRIBUTE_UNUSED) { +static ManagedRegister ReturnRegisterForShorty(const char* shorty, [[maybe_unused]] bool jni) { if (shorty[0] == 'F' || shorty[0] == 'D') { return X86_64ManagedRegister::FromXmmRegister(XMM0); } else if (shorty[0] == 'J') { diff --git a/compiler/linker/output_stream_test.cc 
b/compiler/linker/output_stream_test.cc index 22b174fce6..6b62874643 100644 --- a/compiler/linker/output_stream_test.cc +++ b/compiler/linker/output_stream_test.cc @@ -107,13 +107,13 @@ TEST_F(OutputStreamTest, BufferedFlush) { flush_called(false) { } ~CheckingOutputStream() override {} - bool WriteFully(const void* buffer ATTRIBUTE_UNUSED, - size_t byte_count ATTRIBUTE_UNUSED) override { + bool WriteFully([[maybe_unused]] const void* buffer, + [[maybe_unused]] size_t byte_count) override { LOG(FATAL) << "UNREACHABLE"; UNREACHABLE(); } - off_t Seek(off_t offset ATTRIBUTE_UNUSED, Whence whence ATTRIBUTE_UNUSED) override { + off_t Seek([[maybe_unused]] off_t offset, [[maybe_unused]] Whence whence) override { LOG(FATAL) << "UNREACHABLE"; UNREACHABLE(); } diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc index f90f17f8f5..cc8cb8962a 100644 --- a/compiler/optimizing/code_generator.cc +++ b/compiler/optimizing/code_generator.cc @@ -288,8 +288,8 @@ uint64_t CodeGenerator::GetJitClassRootIndex(TypeReference type_reference) { return code_generation_data_->GetJitClassRootIndex(type_reference); } -void CodeGenerator::EmitJitRootPatches(uint8_t* code ATTRIBUTE_UNUSED, - const uint8_t* roots_data ATTRIBUTE_UNUSED) { +void CodeGenerator::EmitJitRootPatches([[maybe_unused]] uint8_t* code, + [[maybe_unused]] const uint8_t* roots_data) { DCHECK(code_generation_data_ != nullptr); DCHECK_EQ(code_generation_data_->GetNumberOfJitStringRoots(), 0u); DCHECK_EQ(code_generation_data_->GetNumberOfJitClassRoots(), 0u); @@ -457,18 +457,18 @@ void CodeGenerator::Finalize(CodeAllocator* allocator) { } void CodeGenerator::EmitLinkerPatches( - ArenaVector<linker::LinkerPatch>* linker_patches ATTRIBUTE_UNUSED) { + [[maybe_unused]] ArenaVector<linker::LinkerPatch>* linker_patches) { // No linker patches by default. } -bool CodeGenerator::NeedsThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED) const { +bool CodeGenerator::NeedsThunkCode([[maybe_unused]] const linker::LinkerPatch& patch) const { // Code generators that create patches requiring thunk compilation should override this function. return false; } -void CodeGenerator::EmitThunkCode(const linker::LinkerPatch& patch ATTRIBUTE_UNUSED, - /*out*/ ArenaVector<uint8_t>* code ATTRIBUTE_UNUSED, - /*out*/ std::string* debug_name ATTRIBUTE_UNUSED) { +void CodeGenerator::EmitThunkCode([[maybe_unused]] const linker::LinkerPatch& patch, + [[maybe_unused]] /*out*/ ArenaVector<uint8_t>* code, + [[maybe_unused]] /*out*/ std::string* debug_name) { // Code generators that create patches requiring thunk compilation should override this function. 
LOG(FATAL) << "Unexpected call to EmitThunkCode()."; } diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index d530d08b98..fe81b3148f 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -1572,15 +1572,15 @@ size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_ return kArm64WordSize; } -size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED, - uint32_t reg_id ATTRIBUTE_UNUSED) { +size_t CodeGeneratorARM64::SaveFloatingPointRegister([[maybe_unused]] size_t stack_index, + [[maybe_unused]] uint32_t reg_id) { LOG(FATAL) << "FP registers shouldn't be saved/restored individually, " << "use SaveRestoreLiveRegistersHelper"; UNREACHABLE(); } -size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED, - uint32_t reg_id ATTRIBUTE_UNUSED) { +size_t CodeGeneratorARM64::RestoreFloatingPointRegister([[maybe_unused]] size_t stack_index, + [[maybe_unused]] uint32_t reg_id) { LOG(FATAL) << "FP registers shouldn't be saved/restored individually, " << "use SaveRestoreLiveRegistersHelper"; UNREACHABLE(); @@ -3687,7 +3687,7 @@ void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) { } void InstructionCodeGeneratorARM64::VisitDoubleConstant( - HDoubleConstant* constant ATTRIBUTE_UNUSED) { + [[maybe_unused]] HDoubleConstant* constant) { // Will be generated at use site. } @@ -3695,8 +3695,7 @@ void LocationsBuilderARM64::VisitExit(HExit* exit) { exit->SetLocations(nullptr); } -void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) { -} +void InstructionCodeGeneratorARM64::VisitExit([[maybe_unused]] HExit* exit) {} void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) { LocationSummary* locations = @@ -3704,7 +3703,7 @@ void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) { // Will be generated at use site. } @@ -4565,7 +4564,7 @@ void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitIntConstant([[maybe_unused]] HIntConstant* constant) { // Will be generated at use site. } @@ -4574,7 +4573,7 @@ void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitNullConstant([[maybe_unused]] HNullConstant* constant) { // Will be generated at use site. } @@ -4749,8 +4748,8 @@ static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codege } HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch( - const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, - ArtMethod* method ATTRIBUTE_UNUSED) { + const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, + [[maybe_unused]] ArtMethod* method) { // On ARM64 we support all dispatch types. 
return desired_dispatch_info; } @@ -5639,7 +5638,7 @@ void LocationsBuilderARM64::VisitClearException(HClearException* clear) { new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall); } -void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitClearException([[maybe_unused]] HClearException* clear) { __ Str(wzr, GetExceptionTlsAddress()); } @@ -5769,7 +5768,7 @@ void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitLongConstant([[maybe_unused]] HLongConstant* constant) { // Will be generated at use site. } @@ -5969,7 +5968,7 @@ void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) { HandleBinaryOp(instruction); } -void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderARM64::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) { LOG(FATAL) << "Unreachable"; } @@ -5996,7 +5995,7 @@ void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) { } void InstructionCodeGeneratorARM64::VisitParameterValue( - HParameterValue* instruction ATTRIBUTE_UNUSED) { + [[maybe_unused]] HParameterValue* instruction) { // Nothing to do, the parameter is already at its location. } @@ -6007,7 +6006,7 @@ void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) { } void InstructionCodeGeneratorARM64::VisitCurrentMethod( - HCurrentMethod* instruction ATTRIBUTE_UNUSED) { + [[maybe_unused]] HCurrentMethod* instruction) { // Nothing to do, the method is already at its location. } @@ -6019,7 +6018,7 @@ void LocationsBuilderARM64::VisitPhi(HPhi* instruction) { locations->SetOut(Location::Any()); } -void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitPhi([[maybe_unused]] HPhi* instruction) { LOG(FATAL) << "Unreachable"; } @@ -6214,7 +6213,7 @@ void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor } void InstructionCodeGeneratorARM64::VisitConstructorFence( - HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) { + [[maybe_unused]] HConstructorFence* constructor_fence) { codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore); } @@ -6254,7 +6253,7 @@ void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) { instruction->SetLocations(nullptr); } -void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitReturnVoid([[maybe_unused]] HReturnVoid* instruction) { codegen_->GenerateFrameExit(); } @@ -6506,12 +6505,12 @@ void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) { HandleBinaryOp(instruction); } -void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderARM64::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. LOG(FATAL) << "Unreachable"; } -void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARM64::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. 
LOG(FATAL) << "Unreachable"; } diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h index 6190364d1d..b2563071cd 100644 --- a/compiler/optimizing/code_generator_arm64.h +++ b/compiler/optimizing/code_generator_arm64.h @@ -304,16 +304,16 @@ class FieldAccessCallingConventionARM64 : public FieldAccessCallingConvention { Location GetFieldIndexLocation() const override { return helpers::LocationFrom(vixl::aarch64::x0); } - Location GetReturnLocation(DataType::Type type ATTRIBUTE_UNUSED) const override { + Location GetReturnLocation([[maybe_unused]] DataType::Type type) const override { return helpers::LocationFrom(vixl::aarch64::x0); } - Location GetSetValueLocation(DataType::Type type ATTRIBUTE_UNUSED, + Location GetSetValueLocation([[maybe_unused]] DataType::Type type, bool is_instance) const override { return is_instance ? helpers::LocationFrom(vixl::aarch64::x2) : helpers::LocationFrom(vixl::aarch64::x1); } - Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override { + Location GetFpuLocation([[maybe_unused]] DataType::Type type) const override { return helpers::LocationFrom(vixl::aarch64::d0); } @@ -737,9 +737,7 @@ class CodeGeneratorARM64 : public CodeGenerator { ParallelMoveResolverARM64* GetMoveResolver() override { return &move_resolver_; } - bool NeedsTwoRegisters(DataType::Type type ATTRIBUTE_UNUSED) const override { - return false; - } + bool NeedsTwoRegisters([[maybe_unused]] DataType::Type type) const override { return false; } // Check if the desired_string_load_kind is supported. If it is, return it, // otherwise return a fall-back kind that should be used instead. diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc index ecc7a68b94..b61f6b51f5 100644 --- a/compiler/optimizing/code_generator_arm_vixl.cc +++ b/compiler/optimizing/code_generator_arm_vixl.cc @@ -1103,27 +1103,27 @@ static uint32_t ComputeSRegisterListMask(const SRegisterList& regs) { } // Saves the register in the stack. Returns the size taken on stack. -size_t CodeGeneratorARMVIXL::SaveCoreRegister(size_t stack_index ATTRIBUTE_UNUSED, - uint32_t reg_id ATTRIBUTE_UNUSED) { +size_t CodeGeneratorARMVIXL::SaveCoreRegister([[maybe_unused]] size_t stack_index, + [[maybe_unused]] uint32_t reg_id) { TODO_VIXL32(FATAL); UNREACHABLE(); } // Restores the register from the stack. Returns the size taken on stack. 
-size_t CodeGeneratorARMVIXL::RestoreCoreRegister(size_t stack_index ATTRIBUTE_UNUSED, - uint32_t reg_id ATTRIBUTE_UNUSED) { +size_t CodeGeneratorARMVIXL::RestoreCoreRegister([[maybe_unused]] size_t stack_index, + [[maybe_unused]] uint32_t reg_id) { TODO_VIXL32(FATAL); UNREACHABLE(); } -size_t CodeGeneratorARMVIXL::SaveFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED, - uint32_t reg_id ATTRIBUTE_UNUSED) { +size_t CodeGeneratorARMVIXL::SaveFloatingPointRegister([[maybe_unused]] size_t stack_index, + [[maybe_unused]] uint32_t reg_id) { TODO_VIXL32(FATAL); UNREACHABLE(); } -size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister(size_t stack_index ATTRIBUTE_UNUSED, - uint32_t reg_id ATTRIBUTE_UNUSED) { +size_t CodeGeneratorARMVIXL::RestoreFloatingPointRegister([[maybe_unused]] size_t stack_index, + [[maybe_unused]] uint32_t reg_id) { TODO_VIXL32(FATAL); UNREACHABLE(); } @@ -2873,8 +2873,7 @@ void LocationsBuilderARMVIXL::VisitExit(HExit* exit) { exit->SetLocations(nullptr); } -void InstructionCodeGeneratorARMVIXL::VisitExit(HExit* exit ATTRIBUTE_UNUSED) { -} +void InstructionCodeGeneratorARMVIXL::VisitExit([[maybe_unused]] HExit* exit) {} void InstructionCodeGeneratorARMVIXL::GenerateCompareTestAndBranch(HCondition* condition, vixl32::Label* true_target, @@ -3471,7 +3470,7 @@ void LocationsBuilderARMVIXL::VisitIntConstant(HIntConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARMVIXL::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitIntConstant([[maybe_unused]] HIntConstant* constant) { // Will be generated at use site. } @@ -3481,7 +3480,7 @@ void LocationsBuilderARMVIXL::VisitNullConstant(HNullConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARMVIXL::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitNullConstant([[maybe_unused]] HNullConstant* constant) { // Will be generated at use site. } @@ -3491,7 +3490,7 @@ void LocationsBuilderARMVIXL::VisitLongConstant(HLongConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorARMVIXL::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitLongConstant([[maybe_unused]] HLongConstant* constant) { // Will be generated at use site. } @@ -3502,7 +3501,7 @@ void LocationsBuilderARMVIXL::VisitFloatConstant(HFloatConstant* constant) { } void InstructionCodeGeneratorARMVIXL::VisitFloatConstant( - HFloatConstant* constant ATTRIBUTE_UNUSED) { + [[maybe_unused]] HFloatConstant* constant) { // Will be generated at use site. } @@ -3513,7 +3512,7 @@ void LocationsBuilderARMVIXL::VisitDoubleConstant(HDoubleConstant* constant) { } void InstructionCodeGeneratorARMVIXL::VisitDoubleConstant( - HDoubleConstant* constant ATTRIBUTE_UNUSED) { + [[maybe_unused]] HDoubleConstant* constant) { // Will be generated at use site. 
} @@ -3522,7 +3521,7 @@ void LocationsBuilderARMVIXL::VisitConstructorFence(HConstructorFence* construct } void InstructionCodeGeneratorARMVIXL::VisitConstructorFence( - HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) { + [[maybe_unused]] HConstructorFence* constructor_fence) { codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore); } @@ -3538,7 +3537,7 @@ void LocationsBuilderARMVIXL::VisitReturnVoid(HReturnVoid* ret) { ret->SetLocations(nullptr); } -void InstructionCodeGeneratorARMVIXL::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) { codegen_->GenerateFrameExit(); } @@ -5666,7 +5665,7 @@ void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) } void InstructionCodeGeneratorARMVIXL::VisitParameterValue( - HParameterValue* instruction ATTRIBUTE_UNUSED) { + [[maybe_unused]] HParameterValue* instruction) { // Nothing to do, the parameter is already at its location. } @@ -5677,7 +5676,7 @@ void LocationsBuilderARMVIXL::VisitCurrentMethod(HCurrentMethod* instruction) { } void InstructionCodeGeneratorARMVIXL::VisitCurrentMethod( - HCurrentMethod* instruction ATTRIBUTE_UNUSED) { + [[maybe_unused]] HCurrentMethod* instruction) { // Nothing to do, the method is already at its location. } @@ -5818,7 +5817,7 @@ void LocationsBuilderARMVIXL::VisitPhi(HPhi* instruction) { locations->SetOut(Location::Any()); } -void InstructionCodeGeneratorARMVIXL::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitPhi([[maybe_unused]] HPhi* instruction) { LOG(FATAL) << "Unreachable"; } @@ -7282,7 +7281,7 @@ void CodeGeneratorARMVIXL::MarkGCCard(vixl32::Register temp, } } -void LocationsBuilderARMVIXL::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderARMVIXL::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) { LOG(FATAL) << "Unreachable"; } @@ -7991,7 +7990,7 @@ void LocationsBuilderARMVIXL::VisitClearException(HClearException* clear) { new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall); } -void InstructionCodeGeneratorARMVIXL::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitClearException([[maybe_unused]] HClearException* clear) { UseScratchRegisterScope temps(GetVIXLAssembler()); vixl32::Register temp = temps.Acquire(); __ Mov(temp, 0); @@ -9914,12 +9913,12 @@ void InstructionCodeGeneratorARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulat } } -void LocationsBuilderARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderARMVIXL::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. LOG(FATAL) << "Unreachable"; } -void InstructionCodeGeneratorARMVIXL::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorARMVIXL::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. 
LOG(FATAL) << "Unreachable"; } diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index bcf5ea0907..3ae6515c13 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -1919,8 +1919,7 @@ void LocationsBuilderX86::VisitExit(HExit* exit) { exit->SetLocations(nullptr); } -void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) { -} +void InstructionCodeGeneratorX86::VisitExit([[maybe_unused]] HExit* exit) {} template<class LabelType> void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond, @@ -2560,7 +2559,7 @@ void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitIntConstant([[maybe_unused]] HIntConstant* constant) { // Will be generated at use site. } @@ -2570,7 +2569,7 @@ void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitNullConstant([[maybe_unused]] HNullConstant* constant) { // Will be generated at use site. } @@ -2580,7 +2579,7 @@ void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitLongConstant([[maybe_unused]] HLongConstant* constant) { // Will be generated at use site. } @@ -2590,7 +2589,7 @@ void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) { // Will be generated at use site. } @@ -2600,7 +2599,7 @@ void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitDoubleConstant([[maybe_unused]] HDoubleConstant* constant) { // Will be generated at use site. 
} @@ -2609,7 +2608,7 @@ void LocationsBuilderX86::VisitConstructorFence(HConstructorFence* constructor_f } void InstructionCodeGeneratorX86::VisitConstructorFence( - HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) { + [[maybe_unused]] HConstructorFence* constructor_fence) { codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore); } @@ -2625,7 +2624,7 @@ void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) { ret->SetLocations(nullptr); } -void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) { codegen_->GenerateFrameExit(); } @@ -5140,8 +5139,7 @@ void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) { } void InstructionCodeGeneratorX86::VisitParameterValue( - HParameterValue* instruction ATTRIBUTE_UNUSED) { -} + [[maybe_unused]] HParameterValue* instruction) {} void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) { LocationSummary* locations = @@ -5149,7 +5147,7 @@ void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) { locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument)); } -void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitCurrentMethod([[maybe_unused]] HCurrentMethod* instruction) { } void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) { @@ -5348,7 +5346,7 @@ void LocationsBuilderX86::VisitPhi(HPhi* instruction) { locations->SetOut(Location::Any()); } -void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitPhi([[maybe_unused]] HPhi* instruction) { LOG(FATAL) << "Unreachable"; } @@ -5377,8 +5375,8 @@ void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) { } HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch( - const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, - ArtMethod* method ATTRIBUTE_UNUSED) { + const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, + [[maybe_unused]] ArtMethod* method) { return desired_dispatch_info; } @@ -6803,7 +6801,7 @@ void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) { } } -void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderX86::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) { LOG(FATAL) << "Unreachable"; } @@ -7551,7 +7549,7 @@ void LocationsBuilderX86::VisitClearException(HClearException* clear) { new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall); } -void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitClearException([[maybe_unused]] HClearException* clear) { __ fs()->movl(GetExceptionTlsAddress(), Immediate(0)); } @@ -8637,12 +8635,12 @@ void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction, __ Bind(slow_path->GetExitLabel()); } -void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderX86::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. 
LOG(FATAL) << "Unreachable"; } -void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. LOG(FATAL) << "Unreachable"; } @@ -9097,13 +9095,13 @@ void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_da } } -void LocationsBuilderX86::VisitIntermediateAddress(HIntermediateAddress* instruction - ATTRIBUTE_UNUSED) { +void LocationsBuilderX86::VisitIntermediateAddress( + [[maybe_unused]] HIntermediateAddress* instruction) { LOG(FATAL) << "Unreachable"; } -void InstructionCodeGeneratorX86::VisitIntermediateAddress(HIntermediateAddress* instruction - ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86::VisitIntermediateAddress( + [[maybe_unused]] HIntermediateAddress* instruction) { LOG(FATAL) << "Unreachable"; } diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h index d27155f31d..0905f32d80 100644 --- a/compiler/optimizing/code_generator_x86.h +++ b/compiler/optimizing/code_generator_x86.h @@ -196,7 +196,7 @@ class FieldAccessCallingConventionX86 : public FieldAccessCallingConvention { ? Location::RegisterLocation(EDX) : Location::RegisterLocation(ECX)); } - Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override { + Location GetFpuLocation([[maybe_unused]] DataType::Type type) const override { return Location::FpuRegisterLocation(XMM0); } diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index 2e03f1f3b4..7c61519d00 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -1072,8 +1072,8 @@ void CodeGeneratorX86_64::BlockNonVolatileXmmRegisters(LocationSummary* location } HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch( - const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, - ArtMethod* method ATTRIBUTE_UNUSED) { + const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info, + [[maybe_unused]] ArtMethod* method) { return desired_dispatch_info; } @@ -2002,8 +2002,9 @@ void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) { Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value)); } -void CodeGeneratorX86_64::MoveLocation( - Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) { +void CodeGeneratorX86_64::MoveLocation(Location dst, + Location src, + [[maybe_unused]] DataType::Type dst_type) { Move(dst, src); } @@ -2062,8 +2063,7 @@ void LocationsBuilderX86_64::VisitExit(HExit* exit) { exit->SetLocations(nullptr); } -void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) { -} +void InstructionCodeGeneratorX86_64::VisitExit([[maybe_unused]] HExit* exit) {} template<class LabelType> void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond, @@ -2710,7 +2710,7 @@ void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitIntConstant([[maybe_unused]] HIntConstant* constant) { // Will be generated at use site. 
} @@ -2720,7 +2720,7 @@ void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitNullConstant([[maybe_unused]] HNullConstant* constant) { // Will be generated at use site. } @@ -2730,7 +2730,7 @@ void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitLongConstant([[maybe_unused]] HLongConstant* constant) { // Will be generated at use site. } @@ -2740,7 +2740,7 @@ void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) { locations->SetOut(Location::ConstantLocation(constant)); } -void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitFloatConstant([[maybe_unused]] HFloatConstant* constant) { // Will be generated at use site. } @@ -2751,7 +2751,7 @@ void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) { } void InstructionCodeGeneratorX86_64::VisitDoubleConstant( - HDoubleConstant* constant ATTRIBUTE_UNUSED) { + [[maybe_unused]] HDoubleConstant* constant) { // Will be generated at use site. } @@ -2760,7 +2760,7 @@ void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructo } void InstructionCodeGeneratorX86_64::VisitConstructorFence( - HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) { + [[maybe_unused]] HConstructorFence* constructor_fence) { codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore); } @@ -2776,7 +2776,7 @@ void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) { ret->SetLocations(nullptr); } -void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitReturnVoid([[maybe_unused]] HReturnVoid* ret) { codegen_->GenerateFrameExit(); } @@ -5025,7 +5025,7 @@ void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) { } void InstructionCodeGeneratorX86_64::VisitParameterValue( - HParameterValue* instruction ATTRIBUTE_UNUSED) { + [[maybe_unused]] HParameterValue* instruction) { // Nothing to do, the parameter is already at its location. } @@ -5036,7 +5036,7 @@ void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) { } void InstructionCodeGeneratorX86_64::VisitCurrentMethod( - HCurrentMethod* instruction ATTRIBUTE_UNUSED) { + [[maybe_unused]] HCurrentMethod* instruction) { // Nothing to do, the method is already at its location. 
} @@ -5115,7 +5115,7 @@ void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) { locations->SetOut(Location::Any()); } -void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitPhi([[maybe_unused]] HPhi* instruction) { LOG(FATAL) << "Unimplemented"; } @@ -6136,7 +6136,7 @@ void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp, } } -void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderX86_64::VisitParallelMove([[maybe_unused]] HParallelMove* instruction) { LOG(FATAL) << "Unimplemented"; } @@ -6811,7 +6811,7 @@ void LocationsBuilderX86_64::VisitClearException(HClearException* clear) { new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall); } -void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitClearException([[maybe_unused]] HClearException* clear) { __ gs()->movl(GetExceptionTlsAddress(), Immediate(0)); } @@ -7881,12 +7881,12 @@ void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instructi __ Bind(slow_path->GetExitLabel()); } -void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void LocationsBuilderX86_64::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. LOG(FATAL) << "Unreachable"; } -void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitBoundType([[maybe_unused]] HBoundType* instruction) { // Nothing to do, this should be removed during prepare for register allocator. LOG(FATAL) << "Unreachable"; } @@ -7981,13 +7981,13 @@ void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_ins __ jmp(temp_reg); } -void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction - ATTRIBUTE_UNUSED) { +void LocationsBuilderX86_64::VisitIntermediateAddress( + [[maybe_unused]] HIntermediateAddress* instruction) { LOG(FATAL) << "Unreachable"; } -void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction - ATTRIBUTE_UNUSED) { +void InstructionCodeGeneratorX86_64::VisitIntermediateAddress( + [[maybe_unused]] HIntermediateAddress* instruction) { LOG(FATAL) << "Unreachable"; } diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h index dff2e799e0..e1ce3a9a9b 100644 --- a/compiler/optimizing/code_generator_x86_64.h +++ b/compiler/optimizing/code_generator_x86_64.h @@ -162,16 +162,16 @@ class FieldAccessCallingConventionX86_64 : public FieldAccessCallingConvention { Location GetFieldIndexLocation() const override { return Location::RegisterLocation(RDI); } - Location GetReturnLocation(DataType::Type type ATTRIBUTE_UNUSED) const override { + Location GetReturnLocation([[maybe_unused]] DataType::Type type) const override { return Location::RegisterLocation(RAX); } - Location GetSetValueLocation(DataType::Type type ATTRIBUTE_UNUSED, bool is_instance) - const override { + Location GetSetValueLocation([[maybe_unused]] DataType::Type type, + bool is_instance) const override { return is_instance ? 
Location::RegisterLocation(RDX) : Location::RegisterLocation(RSI); } - Location GetFpuLocation(DataType::Type type ATTRIBUTE_UNUSED) const override { + Location GetFpuLocation([[maybe_unused]] DataType::Type type) const override { return Location::FpuRegisterLocation(XMM0); } @@ -502,9 +502,7 @@ class CodeGeneratorX86_64 : public CodeGenerator { block_labels_ = CommonInitializeLabels<Label>(); } - bool NeedsTwoRegisters(DataType::Type type ATTRIBUTE_UNUSED) const override { - return false; - } + bool NeedsTwoRegisters([[maybe_unused]] DataType::Type type) const override { return false; } // Check if the desired_string_load_kind is supported. If it is, return it, // otherwise return a fall-back kind that should be used instead. diff --git a/compiler/optimizing/codegen_test_utils.h b/compiler/optimizing/codegen_test_utils.h index 7af9d0f44c..53163daabb 100644 --- a/compiler/optimizing/codegen_test_utils.h +++ b/compiler/optimizing/codegen_test_utils.h @@ -103,8 +103,8 @@ class TestCodeGeneratorARMVIXL : public arm::CodeGeneratorARMVIXL { blocked_core_registers_[arm::R7] = false; } - void MaybeGenerateMarkingRegisterCheck(int code ATTRIBUTE_UNUSED, - Location temp_loc ATTRIBUTE_UNUSED) override { + void MaybeGenerateMarkingRegisterCheck([[maybe_unused]] int code, + [[maybe_unused]] Location temp_loc) override { // When turned on, the marking register checks in // CodeGeneratorARMVIXL::MaybeGenerateMarkingRegisterCheck expects the // Thread Register and the Marking Register to be set to @@ -135,8 +135,8 @@ class TestCodeGeneratorARM64 : public arm64::CodeGeneratorARM64 { TestCodeGeneratorARM64(HGraph* graph, const CompilerOptions& compiler_options) : arm64::CodeGeneratorARM64(graph, compiler_options) {} - void MaybeGenerateMarkingRegisterCheck(int codem ATTRIBUTE_UNUSED, - Location temp_loc ATTRIBUTE_UNUSED) override { + void MaybeGenerateMarkingRegisterCheck([[maybe_unused]] int codem, + [[maybe_unused]] Location temp_loc) override { // When turned on, the marking register checks in // CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck expect the // Thread Register and the Marking Register to be set to diff --git a/compiler/optimizing/constructor_fence_redundancy_elimination.cc b/compiler/optimizing/constructor_fence_redundancy_elimination.cc index d9b7652f32..48635cfd15 100644 --- a/compiler/optimizing/constructor_fence_redundancy_elimination.cc +++ b/compiler/optimizing/constructor_fence_redundancy_elimination.cc @@ -78,7 +78,7 @@ class CFREVisitor final : public HGraphVisitor { VisitSetLocation(instruction, value); } - void VisitDeoptimize(HDeoptimize* instruction ATTRIBUTE_UNUSED) override { + void VisitDeoptimize([[maybe_unused]] HDeoptimize* instruction) override { // Pessimize: Merge all fences. MergeCandidateFences(); } @@ -151,7 +151,7 @@ class CFREVisitor final : public HGraphVisitor { } } - void VisitSetLocation(HInstruction* inst ATTRIBUTE_UNUSED, HInstruction* store_input) { + void VisitSetLocation([[maybe_unused]] HInstruction* inst, HInstruction* store_input) { // An object is considered "published" if it's stored onto the heap. // Sidenote: A later "LSE" pass can still remove the fence if it proves the // object doesn't actually escape. diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h index 893cd04411..8cb7f6ac4f 100644 --- a/compiler/optimizing/intrinsics.h +++ b/compiler/optimizing/intrinsics.h @@ -60,8 +60,7 @@ class IntrinsicVisitor : public ValueObject { // Define visitor methods. #define OPTIMIZING_INTRINSICS(Name, ...) 
\ - virtual void Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \ - } + virtual void Visit##Name([[maybe_unused]] HInvoke* invoke) {} #include "intrinsics_list.h" INTRINSICS_LIST(OPTIMIZING_INTRINSICS) #undef INTRINSICS_LIST @@ -254,11 +253,9 @@ class VarHandleOptimizations : public IntrinsicOptimizations { // intrinsic to exploit e.g. no side-effects or exceptions, but otherwise not handled // by this architecture-specific intrinsics code generator. Eventually it is implemented // as a true method call. -#define UNIMPLEMENTED_INTRINSIC(Arch, Name) \ -void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \ -} \ -void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \ -} +#define UNIMPLEMENTED_INTRINSIC(Arch, Name) \ + void IntrinsicLocationsBuilder##Arch::Visit##Name([[maybe_unused]] HInvoke* invoke) {} \ + void IntrinsicCodeGenerator##Arch::Visit##Name([[maybe_unused]] HInvoke* invoke) {} // Defines a list of unreached intrinsics: that is, method calls that are recognized as // an intrinsic, and then always converted into HIR instructions before they reach any diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index fb9a419a7e..2ec2134fb1 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -3676,7 +3676,7 @@ void IntrinsicLocationsBuilderARM64::VisitReachabilityFence(HInvoke* invoke) { locations->SetInAt(0, Location::Any()); } -void IntrinsicCodeGeneratorARM64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { } +void IntrinsicCodeGeneratorARM64::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {} void IntrinsicLocationsBuilderARM64::VisitCRC32Update(HInvoke* invoke) { if (!codegen_->GetInstructionSetFeatures().HasCRC()) { diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc index 366a4683d1..d31593cf9f 100644 --- a/compiler/optimizing/intrinsics_arm_vixl.cc +++ b/compiler/optimizing/intrinsics_arm_vixl.cc @@ -2653,7 +2653,7 @@ void IntrinsicLocationsBuilderARMVIXL::VisitReachabilityFence(HInvoke* invoke) { locations->SetInAt(0, Location::Any()); } -void IntrinsicCodeGeneratorARMVIXL::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { } +void IntrinsicCodeGeneratorARMVIXL::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {} void IntrinsicLocationsBuilderARMVIXL::VisitIntegerDivideUnsigned(HInvoke* invoke) { CreateIntIntToIntSlowPathCallLocations(allocator_, invoke); diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index a93a8b581f..02f312e74e 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ b/compiler/optimizing/intrinsics_x86.cc @@ -3503,7 +3503,7 @@ void IntrinsicLocationsBuilderX86::VisitReachabilityFence(HInvoke* invoke) { locations->SetInAt(0, Location::Any()); } -void IntrinsicCodeGeneratorX86::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { } +void IntrinsicCodeGeneratorX86::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {} void IntrinsicLocationsBuilderX86::VisitIntegerDivideUnsigned(HInvoke* invoke) { LocationSummary* locations = new (allocator_) LocationSummary(invoke, diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc index a573db806f..99da84408a 100644 --- a/compiler/optimizing/intrinsics_x86_64.cc +++ b/compiler/optimizing/intrinsics_x86_64.cc @@ -1871,7 +1871,7 @@ void 
IntrinsicCodeGeneratorX86_64::VisitThreadCurrentThread(HInvoke* invoke) { static void GenUnsafeGet(HInvoke* invoke, DataType::Type type, - bool is_volatile ATTRIBUTE_UNUSED, + [[maybe_unused]] bool is_volatile, CodeGeneratorX86_64* codegen) { X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler()); LocationSummary* locations = invoke->GetLocations(); @@ -3249,7 +3249,7 @@ void IntrinsicLocationsBuilderX86_64::VisitReachabilityFence(HInvoke* invoke) { locations->SetInAt(0, Location::Any()); } -void IntrinsicCodeGeneratorX86_64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { } +void IntrinsicCodeGeneratorX86_64::VisitReachabilityFence([[maybe_unused]] HInvoke* invoke) {} static void CreateDivideUnsignedLocations(HInvoke* invoke, ArenaAllocator* allocator) { LocationSummary* locations = diff --git a/compiler/optimizing/loop_analysis.h b/compiler/optimizing/loop_analysis.h index cec00fecf4..cd8f00588d 100644 --- a/compiler/optimizing/loop_analysis.h +++ b/compiler/optimizing/loop_analysis.h @@ -148,13 +148,15 @@ class ArchNoOptsLoopHelper : public ArenaObject<kArenaAllocOptimization> { // // Returns 'true' by default, should be overridden by particular target loop helper. virtual bool IsLoopNonBeneficialForScalarOpts( - LoopAnalysisInfo* loop_analysis_info ATTRIBUTE_UNUSED) const { return true; } + [[maybe_unused]] LoopAnalysisInfo* loop_analysis_info) const { + return true; + } // Returns optimal scalar unrolling factor for the loop. // // Returns kNoUnrollingFactor by default, should be overridden by particular target loop helper. virtual uint32_t GetScalarUnrollingFactor( - const LoopAnalysisInfo* analysis_info ATTRIBUTE_UNUSED) const { + [[maybe_unused]] const LoopAnalysisInfo* analysis_info) const { return LoopAnalysisInfo::kNoUnrollingFactor; } @@ -166,17 +168,17 @@ class ArchNoOptsLoopHelper : public ArenaObject<kArenaAllocOptimization> { // Returns whether it is beneficial to fully unroll the loop. // // Returns 'false' by default, should be overridden by particular target loop helper. - virtual bool IsFullUnrollingBeneficial(LoopAnalysisInfo* analysis_info ATTRIBUTE_UNUSED) const { + virtual bool IsFullUnrollingBeneficial([[maybe_unused]] LoopAnalysisInfo* analysis_info) const { return false; } // Returns optimal SIMD unrolling factor for the loop. // // Returns kNoUnrollingFactor by default, should be overridden by particular target loop helper. - virtual uint32_t GetSIMDUnrollingFactor(HBasicBlock* block ATTRIBUTE_UNUSED, - int64_t trip_count ATTRIBUTE_UNUSED, - uint32_t max_peel ATTRIBUTE_UNUSED, - uint32_t vector_length ATTRIBUTE_UNUSED) const { + virtual uint32_t GetSIMDUnrollingFactor([[maybe_unused]] HBasicBlock* block, + [[maybe_unused]] int64_t trip_count, + [[maybe_unused]] uint32_t max_peel, + [[maybe_unused]] uint32_t vector_length) const { return LoopAnalysisInfo::kNoUnrollingFactor; } diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index 4a0be10b77..01fbc997af 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -2355,9 +2355,7 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> { return true; } - virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const { - return false; - } + virtual bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const { return false; } // If this instruction will do an implicit null check, return the `HNullCheck` associated // with it. Otherwise return null. 
@@ -2570,7 +2568,7 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> { // // Note: HEnvironment and some other fields are not copied and are set to default values, see // 'explicit HInstruction(const HInstruction& other)' for details. - virtual HInstruction* Clone(ArenaAllocator* arena ATTRIBUTE_UNUSED) const { + virtual HInstruction* Clone([[maybe_unused]] ArenaAllocator* arena) const { LOG(FATAL) << "Cloning is not implemented for the instruction " << DebugName() << " " << GetId(); UNREACHABLE(); @@ -2598,7 +2596,7 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> { // Returns whether any data encoded in the two instructions is equal. // This method does not look at the inputs. Both instructions must be // of the same type, otherwise the method has undefined behavior. - virtual bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const { + virtual bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const { return false; } @@ -3272,7 +3270,7 @@ class HConstant : public HExpression<0> { class HNullConstant final : public HConstant { public: - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -3830,7 +3828,7 @@ class HUnaryOperation : public HExpression<1> { DataType::Type GetResultType() const { return GetType(); } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -3906,7 +3904,7 @@ class HBinaryOperation : public HExpression<2> { } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -3916,15 +3914,15 @@ class HBinaryOperation : public HExpression<2> { HConstant* TryStaticEvaluation() const; // Apply this operation to `x` and `y`. 
- virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, - HNullConstant* y ATTRIBUTE_UNUSED) const { + virtual HConstant* Evaluate([[maybe_unused]] HNullConstant* x, + [[maybe_unused]] HNullConstant* y) const { LOG(FATAL) << DebugName() << " is not defined for the (null, null) case."; UNREACHABLE(); } virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0; virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0; - virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED, - HIntConstant* y ATTRIBUTE_UNUSED) const { + virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x, + [[maybe_unused]] HIntConstant* y) const { LOG(FATAL) << DebugName() << " is not defined for the (long, int) case."; UNREACHABLE(); } @@ -4052,8 +4050,8 @@ class HEqual final : public HCondition { bool IsCommutative() const override { return true; } - HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, - HNullConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HNullConstant* x, + [[maybe_unused]] HNullConstant* y) const override { return MakeConstantCondition(true, GetDexPc()); } HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override { @@ -4099,8 +4097,8 @@ class HNotEqual final : public HCondition { bool IsCommutative() const override { return true; } - HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED, - HNullConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HNullConstant* x, + [[maybe_unused]] HNullConstant* y) const override { return MakeConstantCondition(false, GetDexPc()); } HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override { @@ -4306,13 +4304,13 @@ class HBelow final : public HCondition { HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override { return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -4348,13 +4346,13 @@ class HBelowOrEqual final : public HCondition { HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override { return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -4390,13 +4388,13 @@ class HAbove final : public HCondition { HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override { return 
MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -4432,13 +4430,13 @@ class HAboveOrEqual final : public HCondition { HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override { return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -4525,7 +4523,7 @@ class HCompare final : public HBinaryOperation { return GetBias() == ComparisonBias::kGtBias; } - static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type type ATTRIBUTE_UNUSED) { + static SideEffects SideEffectsForArchRuntimeCalls([[maybe_unused]] DataType::Type type) { // Comparisons do not require a runtime call in any back end. return SideEffects::None(); } @@ -5018,7 +5016,7 @@ class HInvokeStaticOrDirect final : public HInvoke { return input_records; } - bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override { + bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override { // We do not access the method via object reference, so we cannot do an implicit null check. // TODO: for intrinsics we can generate implicit null checks. return false; @@ -5602,10 +5600,14 @@ class HMin final : public HBinaryOperation { ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc()); } // TODO: Evaluation for floating-point values. - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; } + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { + return nullptr; + } + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { + return nullptr; + } DECLARE_INSTRUCTION(Min); @@ -5637,10 +5639,14 @@ class HMax final : public HBinaryOperation { ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc()); } // TODO: Evaluation for floating-point values. 
- HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { return nullptr; } + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { + return nullptr; + } + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { + return nullptr; + } DECLARE_INSTRUCTION(Max); @@ -5702,7 +5708,7 @@ class HDivZeroCheck final : public HExpression<1> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -5739,18 +5745,18 @@ class HShl final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc()); } - HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED, - HLongConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HLongConstant* value, + [[maybe_unused]] HLongConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for the (long, long) case."; UNREACHABLE(); } - HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED, - HFloatConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* value, + [[maybe_unused]] HFloatConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED, - HDoubleConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value, + [[maybe_unused]] HDoubleConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -5785,18 +5791,18 @@ class HShr final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc()); } - HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED, - HLongConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HLongConstant* value, + [[maybe_unused]] HLongConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for the (long, long) case."; UNREACHABLE(); } - HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED, - HFloatConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* value, + [[maybe_unused]] HFloatConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED, - HDoubleConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value, + [[maybe_unused]] HDoubleConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -5833,18 +5839,18 @@ class HUShr final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc()); } - 
HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED, - HLongConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HLongConstant* value, + [[maybe_unused]] HLongConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for the (long, long) case."; UNREACHABLE(); } - HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED, - HFloatConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* value, + [[maybe_unused]] HFloatConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED, - HDoubleConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value, + [[maybe_unused]] HDoubleConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -5876,13 +5882,13 @@ class HAnd final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -5914,13 +5920,13 @@ class HOr final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -5952,13 +5958,13 @@ class HXor final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -5996,18 +6002,18 @@ class HRor final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(value->GetValue(), distance->GetValue(), 
kMaxLongShiftDistance), GetDexPc()); } - HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED, - HLongConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HLongConstant* value, + [[maybe_unused]] HLongConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for the (long, long) case."; UNREACHABLE(); } - HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED, - HFloatConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* value, + [[maybe_unused]] HFloatConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED, - HDoubleConstant* distance ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* value, + [[maybe_unused]] HDoubleConstant* distance) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -6070,7 +6076,7 @@ class HNot final : public HUnaryOperation { } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -6082,11 +6088,11 @@ class HNot final : public HUnaryOperation { HConstant* Evaluate(HLongConstant* x) const override { return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -6104,7 +6110,7 @@ class HBooleanNot final : public HUnaryOperation { } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -6116,15 +6122,15 @@ class HBooleanNot final : public HUnaryOperation { HConstant* Evaluate(HIntConstant* x) const override { return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc()); } - HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HLongConstant* x) const override { LOG(FATAL) << DebugName() << " is not defined for long values"; UNREACHABLE(); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -6151,7 +6157,7 @@ class HTypeConversion final : public HExpression<1> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool 
InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } // Return whether the conversion is implicit. This includes conversion to the same type. @@ -6183,7 +6189,7 @@ class HNullCheck final : public HExpression<1> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -6536,10 +6542,10 @@ class HArrayGet final : public HExpression<2> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } - bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override { + bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override { // TODO: We can be smarter here. // Currently, unless the array is the result of NewArray, the array access is always // preceded by some form of null NullCheck necessary for the bounds check, usually @@ -6643,7 +6649,7 @@ class HArraySet final : public HExpression<3> { // Can throw ArrayStoreException. bool CanThrow() const override { return NeedsTypeCheck(); } - bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const override { + bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override { // TODO: Same as for ArrayGet. return false; } @@ -6749,7 +6755,7 @@ class HArrayLength final : public HExpression<1> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } bool CanDoImplicitNullCheckOn(HInstruction* obj) const override { @@ -6793,7 +6799,7 @@ class HBoundsCheck final : public HExpression<2> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -7426,7 +7432,7 @@ class HClinitCheck final : public HExpression<1> { } // TODO: Make ClinitCheck clonable. 
bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -8346,7 +8352,7 @@ class HSelect final : public HExpression<3> { HInstruction* GetCondition() const { return InputAt(2); } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } @@ -8516,7 +8522,7 @@ class HIntermediateAddress final : public HExpression<2> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } bool IsActualObject() const override { return false; } @@ -8553,7 +8559,7 @@ class HGraphVisitor : public ValueObject { graph_(graph) {} virtual ~HGraphVisitor() {} - virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {} + virtual void VisitInstruction([[maybe_unused]] HInstruction* instruction) {} virtual void VisitBasicBlock(HBasicBlock* block); // Visit the graph following basic block insertion order. diff --git a/compiler/optimizing/nodes_shared.h b/compiler/optimizing/nodes_shared.h index 27e610328f..4b0187d536 100644 --- a/compiler/optimizing/nodes_shared.h +++ b/compiler/optimizing/nodes_shared.h @@ -105,13 +105,13 @@ class HBitwiseNegatedRight final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -160,7 +160,7 @@ class HIntermediateAddressIndex final : public HExpression<3> { bool IsClonable() const override { return true; } bool CanBeMoved() const override { return true; } - bool InstructionDataEquals(const HInstruction* other ATTRIBUTE_UNUSED) const override { + bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override { return true; } bool IsActualObject() const override { return false; } diff --git a/compiler/optimizing/nodes_x86.h b/compiler/optimizing/nodes_x86.h index e246390aa5..14d9823355 100644 --- a/compiler/optimizing/nodes_x86.h +++ b/compiler/optimizing/nodes_x86.h @@ -149,13 +149,13 @@ class HX86AndNot final : public HBinaryOperation { return GetBlock()->GetGraph()->GetLongConstant( Compute(x->GetValue(), y->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED, - HFloatConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x, + [[maybe_unused]] HFloatConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for float values"; UNREACHABLE(); } - HConstant* 
Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED, - HDoubleConstant* y ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x, + [[maybe_unused]] HDoubleConstant* y) const override { LOG(FATAL) << DebugName() << " is not defined for double values"; UNREACHABLE(); } @@ -196,11 +196,11 @@ class HX86MaskOrResetLeastSetBit final : public HUnaryOperation { HConstant* Evaluate(HLongConstant* x) const override { return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc()); } - HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const override { LOG(FATAL) << DebugName() << "is not defined for float values"; UNREACHABLE(); } - HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const override { + HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const override { LOG(FATAL) << DebugName() << "is not defined for double values"; UNREACHABLE(); } diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index 00eb6e5c42..8207501090 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -447,8 +447,8 @@ void OptimizingCompiler::DumpInstructionSetFeaturesToCfg() const { << HGraphVisualizer::InsertMetaDataAsCompilationBlock(isa_string + ' ' + features_string); } -bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx ATTRIBUTE_UNUSED, - const DexFile& dex_file ATTRIBUTE_UNUSED) const { +bool OptimizingCompiler::CanCompileMethod([[maybe_unused]] uint32_t method_idx, + [[maybe_unused]] const DexFile& dex_file) const { return true; } @@ -1221,7 +1221,7 @@ Compiler* CreateOptimizingCompiler(const CompilerOptions& compiler_options, return new OptimizingCompiler(compiler_options, storage); } -bool EncodeArtMethodInInlineInfo(ArtMethod* method ATTRIBUTE_UNUSED) { +bool EncodeArtMethodInInlineInfo([[maybe_unused]] ArtMethod* method) { // Note: the runtime is null only for unit testing. 
return Runtime::Current() == nullptr || !Runtime::Current()->IsAotCompiler(); } diff --git a/compiler/optimizing/parallel_move_test.cc b/compiler/optimizing/parallel_move_test.cc index a1c05e9cad..d2b993280d 100644 --- a/compiler/optimizing/parallel_move_test.cc +++ b/compiler/optimizing/parallel_move_test.cc @@ -81,8 +81,8 @@ class TestParallelMoveResolverWithSwap : public ParallelMoveResolverWithSwap { message_ << ")"; } - void SpillScratch(int reg ATTRIBUTE_UNUSED) override {} - void RestoreScratch(int reg ATTRIBUTE_UNUSED) override {} + void SpillScratch([[maybe_unused]] int reg) override {} + void RestoreScratch([[maybe_unused]] int reg) override {} std::string GetMessage() const { return message_.str(); @@ -126,7 +126,7 @@ class TestParallelMoveResolverNoSwap : public ParallelMoveResolverNoSwap { return scratch; } - void FreeScratchLocation(Location loc ATTRIBUTE_UNUSED) override {} + void FreeScratchLocation([[maybe_unused]] Location loc) override {} void EmitMove(size_t index) override { MoveOperands* move = moves_[index]; diff --git a/compiler/optimizing/scheduler_arm.cc b/compiler/optimizing/scheduler_arm.cc index 3f931c4c49..53ad2b12c0 100644 --- a/compiler/optimizing/scheduler_arm.cc +++ b/compiler/optimizing/scheduler_arm.cc @@ -610,7 +610,7 @@ void SchedulingLatencyVisitorARM::VisitDataProcWithShifterOp(HDataProcWithShifte } } -void SchedulingLatencyVisitorARM::VisitIntermediateAddress(HIntermediateAddress* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitIntermediateAddress([[maybe_unused]] HIntermediateAddress*) { // Although the code generated is a simple `add` instruction, we found through empirical results // that spacing it from its use in memory accesses was beneficial. last_visited_internal_latency_ = kArmNopLatency; @@ -618,11 +618,11 @@ void SchedulingLatencyVisitorARM::VisitIntermediateAddress(HIntermediateAddress* } void SchedulingLatencyVisitorARM::VisitIntermediateAddressIndex( - HIntermediateAddressIndex* ATTRIBUTE_UNUSED) { + [[maybe_unused]] HIntermediateAddressIndex*) { UNIMPLEMENTED(FATAL) << "IntermediateAddressIndex is not implemented for ARM"; } -void SchedulingLatencyVisitorARM::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitMultiplyAccumulate([[maybe_unused]] HMultiplyAccumulate*) { last_visited_latency_ = kArmMulIntegerLatency; } @@ -806,7 +806,7 @@ void SchedulingLatencyVisitorARM::VisitArraySet(HArraySet* instruction) { } } -void SchedulingLatencyVisitorARM::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitBoundsCheck([[maybe_unused]] HBoundsCheck*) { last_visited_internal_latency_ = kArmIntegerOpLatency; // Users do not use any data results. 
last_visited_latency_ = 0; @@ -866,22 +866,22 @@ void SchedulingLatencyVisitorARM::VisitInstanceFieldSet(HInstanceFieldSet* instr HandleFieldSetLatencies(instruction, instruction->GetFieldInfo()); } -void SchedulingLatencyVisitorARM::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitInstanceOf([[maybe_unused]] HInstanceOf*) { last_visited_internal_latency_ = kArmCallInternalLatency; last_visited_latency_ = kArmIntegerOpLatency; } -void SchedulingLatencyVisitorARM::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitInvoke([[maybe_unused]] HInvoke*) { last_visited_internal_latency_ = kArmCallInternalLatency; last_visited_latency_ = kArmCallLatency; } -void SchedulingLatencyVisitorARM::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitLoadString([[maybe_unused]] HLoadString*) { last_visited_internal_latency_ = kArmLoadStringInternalLatency; last_visited_latency_ = kArmMemoryLoadLatency; } -void SchedulingLatencyVisitorARM::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM::VisitNewArray([[maybe_unused]] HNewArray*) { last_visited_internal_latency_ = kArmIntegerOpLatency + kArmCallInternalLatency; last_visited_latency_ = kArmCallLatency; } diff --git a/compiler/optimizing/scheduler_arm.h b/compiler/optimizing/scheduler_arm.h index 0da21c187f..cedc12a2be 100644 --- a/compiler/optimizing/scheduler_arm.h +++ b/compiler/optimizing/scheduler_arm.h @@ -53,7 +53,7 @@ class SchedulingLatencyVisitorARM final : public SchedulingLatencyVisitor { : codegen_(down_cast<CodeGeneratorARMVIXL*>(codegen)) {} // Default visitor for instructions not handled specifically below. - void VisitInstruction(HInstruction* ATTRIBUTE_UNUSED) override { + void VisitInstruction([[maybe_unused]] HInstruction*) override { last_visited_latency_ = kArmIntegerOpLatency; } diff --git a/compiler/optimizing/scheduler_arm64.cc b/compiler/optimizing/scheduler_arm64.cc index 3071afd951..5113cf446d 100644 --- a/compiler/optimizing/scheduler_arm64.cc +++ b/compiler/optimizing/scheduler_arm64.cc @@ -30,30 +30,30 @@ void SchedulingLatencyVisitorARM64::VisitBinaryOperation(HBinaryOperation* instr } void SchedulingLatencyVisitorARM64::VisitBitwiseNegatedRight( - HBitwiseNegatedRight* ATTRIBUTE_UNUSED) { + [[maybe_unused]] HBitwiseNegatedRight*) { last_visited_latency_ = kArm64IntegerOpLatency; } void SchedulingLatencyVisitorARM64::VisitDataProcWithShifterOp( - HDataProcWithShifterOp* ATTRIBUTE_UNUSED) { + [[maybe_unused]] HDataProcWithShifterOp*) { last_visited_latency_ = kArm64DataProcWithShifterOpLatency; } void SchedulingLatencyVisitorARM64::VisitIntermediateAddress( - HIntermediateAddress* ATTRIBUTE_UNUSED) { + [[maybe_unused]] HIntermediateAddress*) { // Although the code generated is a simple `add` instruction, we found through empirical results // that spacing it from its use in memory accesses was beneficial. last_visited_latency_ = kArm64IntegerOpLatency + 2; } void SchedulingLatencyVisitorARM64::VisitIntermediateAddressIndex( - HIntermediateAddressIndex* instr ATTRIBUTE_UNUSED) { + [[maybe_unused]] HIntermediateAddressIndex* instr) { // Although the code generated is a simple `add` instruction, we found through empirical results // that spacing it from its use in memory accesses was beneficial. 
last_visited_latency_ = kArm64DataProcWithShifterOpLatency + 2; } -void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate([[maybe_unused]] HMultiplyAccumulate*) { last_visited_latency_ = kArm64MulIntegerLatency; } @@ -65,15 +65,15 @@ void SchedulingLatencyVisitorARM64::VisitArrayGet(HArrayGet* instruction) { last_visited_latency_ = kArm64MemoryLoadLatency; } -void SchedulingLatencyVisitorARM64::VisitArrayLength(HArrayLength* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitArrayLength([[maybe_unused]] HArrayLength*) { last_visited_latency_ = kArm64MemoryLoadLatency; } -void SchedulingLatencyVisitorARM64::VisitArraySet(HArraySet* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitArraySet([[maybe_unused]] HArraySet*) { last_visited_latency_ = kArm64MemoryStoreLatency; } -void SchedulingLatencyVisitorARM64::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitBoundsCheck([[maybe_unused]] HBoundsCheck*) { last_visited_internal_latency_ = kArm64IntegerOpLatency; // Users do not use any data results. last_visited_latency_ = 0; @@ -113,21 +113,21 @@ void SchedulingLatencyVisitorARM64::VisitDiv(HDiv* instr) { } } -void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet(HInstanceFieldGet* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet([[maybe_unused]] HInstanceFieldGet*) { last_visited_latency_ = kArm64MemoryLoadLatency; } -void SchedulingLatencyVisitorARM64::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitInstanceOf([[maybe_unused]] HInstanceOf*) { last_visited_internal_latency_ = kArm64CallInternalLatency; last_visited_latency_ = kArm64IntegerOpLatency; } -void SchedulingLatencyVisitorARM64::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitInvoke([[maybe_unused]] HInvoke*) { last_visited_internal_latency_ = kArm64CallInternalLatency; last_visited_latency_ = kArm64CallLatency; } -void SchedulingLatencyVisitorARM64::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitLoadString([[maybe_unused]] HLoadString*) { last_visited_internal_latency_ = kArm64LoadStringInternalLatency; last_visited_latency_ = kArm64MemoryLoadLatency; } @@ -138,7 +138,7 @@ void SchedulingLatencyVisitorARM64::VisitMul(HMul* instr) { : kArm64MulIntegerLatency; } -void SchedulingLatencyVisitorARM64::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitNewArray([[maybe_unused]] HNewArray*) { last_visited_internal_latency_ = kArm64IntegerOpLatency + kArm64CallInternalLatency; last_visited_latency_ = kArm64CallLatency; } @@ -181,7 +181,7 @@ void SchedulingLatencyVisitorARM64::VisitRem(HRem* instruction) { } } -void SchedulingLatencyVisitorARM64::VisitStaticFieldGet(HStaticFieldGet* ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitStaticFieldGet([[maybe_unused]] HStaticFieldGet*) { last_visited_latency_ = kArm64MemoryLoadLatency; } @@ -211,7 +211,7 @@ void SchedulingLatencyVisitorARM64::HandleSimpleArithmeticSIMD(HVecOperation *in } void SchedulingLatencyVisitorARM64::VisitVecReplicateScalar( - HVecReplicateScalar* instr ATTRIBUTE_UNUSED) { + [[maybe_unused]] HVecReplicateScalar* instr) { last_visited_latency_ = kArm64SIMDReplicateOpLatency; } @@ -223,7 +223,7 @@ void SchedulingLatencyVisitorARM64::VisitVecReduce(HVecReduce* instr) { HandleSimpleArithmeticSIMD(instr); } 
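Most of these latency visitors never look at the instruction they are given; they only record a fixed cost, so the parameter is either left unnamed or tagged [[maybe_unused]]. A small sketch of that shape, with hypothetical names loosely modeled on the visitors above:

#include <cstdint>
#include <iostream>

struct Instruction {};                        // stand-in for an HInstruction-like node
struct MultiplyAccumulate : Instruction {};

class LatencyVisitor {
 public:
  virtual ~LatencyVisitor() = default;

  // Default case: charge a fixed integer-op cost without examining the node.
  virtual void VisitInstruction([[maybe_unused]] Instruction* instruction) {
    last_visited_latency_ = kIntegerOpLatency;
  }

  // An unnamed parameter can never be "used"; the attribute is still valid
  // there and keeps the declaration style uniform with named parameters.
  virtual void VisitMultiplyAccumulate([[maybe_unused]] MultiplyAccumulate*) {
    last_visited_latency_ = kMulLatency;
  }

  uint32_t last_visited_latency_ = 0;

 private:
  static constexpr uint32_t kIntegerOpLatency = 1;
  static constexpr uint32_t kMulLatency = 3;
};

int main() {
  LatencyVisitor visitor;
  MultiplyAccumulate mla;
  visitor.VisitMultiplyAccumulate(&mla);
  std::cout << visitor.last_visited_latency_ << '\n';  // prints 3
  return 0;
}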
-void SchedulingLatencyVisitorARM64::VisitVecCnv(HVecCnv* instr ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitVecCnv([[maybe_unused]] HVecCnv* instr) { last_visited_latency_ = kArm64SIMDTypeConversionInt2FPLatency; } @@ -279,19 +279,19 @@ void SchedulingLatencyVisitorARM64::VisitVecMax(HVecMax* instr) { HandleSimpleArithmeticSIMD(instr); } -void SchedulingLatencyVisitorARM64::VisitVecAnd(HVecAnd* instr ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitVecAnd([[maybe_unused]] HVecAnd* instr) { last_visited_latency_ = kArm64SIMDIntegerOpLatency; } -void SchedulingLatencyVisitorARM64::VisitVecAndNot(HVecAndNot* instr ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitVecAndNot([[maybe_unused]] HVecAndNot* instr) { last_visited_latency_ = kArm64SIMDIntegerOpLatency; } -void SchedulingLatencyVisitorARM64::VisitVecOr(HVecOr* instr ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitVecOr([[maybe_unused]] HVecOr* instr) { last_visited_latency_ = kArm64SIMDIntegerOpLatency; } -void SchedulingLatencyVisitorARM64::VisitVecXor(HVecXor* instr ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::VisitVecXor([[maybe_unused]] HVecXor* instr) { last_visited_latency_ = kArm64SIMDIntegerOpLatency; } @@ -312,13 +312,12 @@ void SchedulingLatencyVisitorARM64::VisitVecSetScalars(HVecSetScalars* instr) { } void SchedulingLatencyVisitorARM64::VisitVecMultiplyAccumulate( - HVecMultiplyAccumulate* instr ATTRIBUTE_UNUSED) { + [[maybe_unused]] HVecMultiplyAccumulate* instr) { last_visited_latency_ = kArm64SIMDMulIntegerLatency; } -void SchedulingLatencyVisitorARM64::HandleVecAddress( - HVecMemoryOperation* instruction, - size_t size ATTRIBUTE_UNUSED) { +void SchedulingLatencyVisitorARM64::HandleVecAddress(HVecMemoryOperation* instruction, + [[maybe_unused]] size_t size) { HInstruction* index = instruction->InputAt(1); if (!index->IsConstant()) { last_visited_internal_latency_ += kArm64DataProcWithShifterOpLatency; diff --git a/compiler/optimizing/scheduler_arm64.h b/compiler/optimizing/scheduler_arm64.h index ec41577e9d..7ce00e00ab 100644 --- a/compiler/optimizing/scheduler_arm64.h +++ b/compiler/optimizing/scheduler_arm64.h @@ -59,7 +59,7 @@ static constexpr uint32_t kArm64SIMDTypeConversionInt2FPLatency = 10; class SchedulingLatencyVisitorARM64 final : public SchedulingLatencyVisitor { public: // Default visitor for instructions not handled specifically below. - void VisitInstruction(HInstruction* ATTRIBUTE_UNUSED) override { + void VisitInstruction([[maybe_unused]] HInstruction*) override { last_visited_latency_ = kArm64IntegerOpLatency; } diff --git a/compiler/utils/arm/assembler_arm_vixl.h b/compiler/utils/arm/assembler_arm_vixl.h index e4d864b54d..025bba0415 100644 --- a/compiler/utils/arm/assembler_arm_vixl.h +++ b/compiler/utils/arm/assembler_arm_vixl.h @@ -220,10 +220,10 @@ class ArmVIXLAssembler final : public Assembler { // Copy instructions out of assembly buffer into the given region of memory. 
void FinalizeInstructions(const MemoryRegion& region) override; - void Bind(Label* label ATTRIBUTE_UNUSED) override { + void Bind([[maybe_unused]] Label* label) override { UNIMPLEMENTED(FATAL) << "Do not use Bind(Label*) for ARM"; } - void Jump(Label* label ATTRIBUTE_UNUSED) override { + void Jump([[maybe_unused]] Label* label) override { UNIMPLEMENTED(FATAL) << "Do not use Jump(Label*) for ARM"; } diff --git a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc index 54873454eb..7a887fa064 100644 --- a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc +++ b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc @@ -344,13 +344,13 @@ void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs } } -void ArmVIXLJNIMacroAssembler::SignExtend(ManagedRegister mreg ATTRIBUTE_UNUSED, - size_t size ATTRIBUTE_UNUSED) { +void ArmVIXLJNIMacroAssembler::SignExtend([[maybe_unused]] ManagedRegister mreg, + [[maybe_unused]] size_t size) { UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm"; } -void ArmVIXLJNIMacroAssembler::ZeroExtend(ManagedRegister mreg ATTRIBUTE_UNUSED, - size_t size ATTRIBUTE_UNUSED) { +void ArmVIXLJNIMacroAssembler::ZeroExtend([[maybe_unused]] ManagedRegister mreg, + [[maybe_unused]] size_t size) { UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm"; } @@ -720,7 +720,7 @@ void ArmVIXLJNIMacroAssembler::MoveArguments(ArrayRef<ArgumentLocation> dests, void ArmVIXLJNIMacroAssembler::Move(ManagedRegister mdst, ManagedRegister msrc, - size_t size ATTRIBUTE_UNUSED) { + [[maybe_unused]] size_t size) { ArmManagedRegister dst = mdst.AsArm(); if (kIsDebugBuild) { // Check that the destination is not a scratch register. @@ -861,13 +861,13 @@ void ArmVIXLJNIMacroAssembler::DecodeJNITransitionOrLocalJObject(ManagedRegister ___ Ldr(reg, MemOperand(reg)); } -void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED, - bool could_be_null ATTRIBUTE_UNUSED) { +void ArmVIXLJNIMacroAssembler::VerifyObject([[maybe_unused]] ManagedRegister src, + [[maybe_unused]] bool could_be_null) { // TODO: not validating references. } -void ArmVIXLJNIMacroAssembler::VerifyObject(FrameOffset src ATTRIBUTE_UNUSED, - bool could_be_null ATTRIBUTE_UNUSED) { +void ArmVIXLJNIMacroAssembler::VerifyObject([[maybe_unused]] FrameOffset src, + [[maybe_unused]] bool could_be_null) { // TODO: not validating references. } diff --git a/compiler/utils/arm64/assembler_arm64.h b/compiler/utils/arm64/assembler_arm64.h index f8168903bd..5eff8ca907 100644 --- a/compiler/utils/arm64/assembler_arm64.h +++ b/compiler/utils/arm64/assembler_arm64.h @@ -145,10 +145,10 @@ class Arm64Assembler final : public Assembler { // MaybeGenerateMarkingRegisterCheck and is passed to the BRK instruction. 
void GenerateMarkingRegisterCheck(vixl::aarch64::Register temp, int code = 0); - void Bind(Label* label ATTRIBUTE_UNUSED) override { + void Bind([[maybe_unused]] Label* label) override { UNIMPLEMENTED(FATAL) << "Do not use Bind(Label*) for ARM64"; } - void Jump(Label* label ATTRIBUTE_UNUSED) override { + void Jump([[maybe_unused]] Label* label) override { UNIMPLEMENTED(FATAL) << "Do not use Jump(Label*) for ARM64"; } diff --git a/compiler/utils/arm64/jni_macro_assembler_arm64.cc b/compiler/utils/arm64/jni_macro_assembler_arm64.cc index 9e9f122cf6..c5380695d9 100644 --- a/compiler/utils/arm64/jni_macro_assembler_arm64.cc +++ b/compiler/utils/arm64/jni_macro_assembler_arm64.cc @@ -705,7 +705,7 @@ void Arm64JNIMacroAssembler::DecodeJNITransitionOrLocalJObject(ManagedRegister m } void Arm64JNIMacroAssembler::TryToTransitionFromRunnableToNative( - JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs ATTRIBUTE_UNUSED) { + JNIMacroLabel* label, [[maybe_unused]] ArrayRef<const ManagedRegister> scratch_regs) { constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative); constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable); constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kArm64PointerSize>(); @@ -734,8 +734,8 @@ void Arm64JNIMacroAssembler::TryToTransitionFromRunnableToNative( void Arm64JNIMacroAssembler::TryToTransitionFromNativeToRunnable( JNIMacroLabel* label, - ArrayRef<const ManagedRegister> scratch_regs ATTRIBUTE_UNUSED, - ManagedRegister return_reg ATTRIBUTE_UNUSED) { + [[maybe_unused]] ArrayRef<const ManagedRegister> scratch_regs, + [[maybe_unused]] ManagedRegister return_reg) { constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative); constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable); constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kArm64PointerSize>(); diff --git a/compiler/utils/assembler.h b/compiler/utils/assembler.h index 13a5d9fd01..63747bee58 100644 --- a/compiler/utils/assembler.h +++ b/compiler/utils/assembler.h @@ -380,7 +380,7 @@ class Assembler : public DeletableArenaObject<kArenaAllocAssembler> { } // TODO: Implement with disassembler. - virtual void Comment(const char* format ATTRIBUTE_UNUSED, ...) {} + virtual void Comment([[maybe_unused]] const char* format, ...) {} virtual void Bind(Label* label) = 0; virtual void Jump(Label* label) = 0; diff --git a/compiler/utils/assembler_test.h b/compiler/utils/assembler_test.h index 43438255d2..810c843b7b 100644 --- a/compiler/utils/assembler_test.h +++ b/compiler/utils/assembler_test.h @@ -803,19 +803,19 @@ class AssemblerTest : public AssemblerTestBase { } // Secondary register names are the secondary view on registers, e.g., 32b on 64b systems. - virtual std::string GetSecondaryRegisterName(const Reg& reg ATTRIBUTE_UNUSED) { + virtual std::string GetSecondaryRegisterName([[maybe_unused]] const Reg& reg) { UNIMPLEMENTED(FATAL) << "Architecture does not support secondary registers"; UNREACHABLE(); } // Tertiary register names are the tertiary view on registers, e.g., 16b on 64b systems. 
- virtual std::string GetTertiaryRegisterName(const Reg& reg ATTRIBUTE_UNUSED) { + virtual std::string GetTertiaryRegisterName([[maybe_unused]] const Reg& reg) { UNIMPLEMENTED(FATAL) << "Architecture does not support tertiary registers"; UNREACHABLE(); } // Quaternary register names are the quaternary view on registers, e.g., 8b on 64b systems. - virtual std::string GetQuaternaryRegisterName(const Reg& reg ATTRIBUTE_UNUSED) { + virtual std::string GetQuaternaryRegisterName([[maybe_unused]] const Reg& reg) { UNIMPLEMENTED(FATAL) << "Architecture does not support quaternary registers"; UNREACHABLE(); } @@ -1576,8 +1576,7 @@ class AssemblerTest : public AssemblerTestBase { } // Override this to pad the code with NOPs to a certain size if needed. - virtual void Pad(std::vector<uint8_t>& data ATTRIBUTE_UNUSED) { - } + virtual void Pad([[maybe_unused]] std::vector<uint8_t>& data) {} void DriverWrapper(const std::string& assembly_text, const std::string& test_name) { assembler_->FinalizeCode(); diff --git a/compiler/utils/jni_macro_assembler_test.h b/compiler/utils/jni_macro_assembler_test.h index ac8e7d3010..0d0a992ebd 100644 --- a/compiler/utils/jni_macro_assembler_test.h +++ b/compiler/utils/jni_macro_assembler_test.h @@ -77,8 +77,7 @@ class JNIMacroAssemblerTest : public AssemblerTestBase { private: // Override this to pad the code with NOPs to a certain size if needed. - virtual void Pad(std::vector<uint8_t>& data ATTRIBUTE_UNUSED) { - } + virtual void Pad([[maybe_unused]] std::vector<uint8_t>& data) {} void DriverWrapper(const std::string& assembly_text, const std::string& test_name) { assembler_->FinalizeCode(); diff --git a/compiler/utils/riscv64/assembler_riscv64.h b/compiler/utils/riscv64/assembler_riscv64.h index 13e7826320..0d781231ba 100644 --- a/compiler/utils/riscv64/assembler_riscv64.h +++ b/compiler/utils/riscv64/assembler_riscv64.h @@ -358,10 +358,10 @@ class Riscv64Assembler final : public Assembler { /////////////////////////////// RV64 MACRO Instructions END /////////////////////////////// - void Bind(Label* label ATTRIBUTE_UNUSED) override { + void Bind([[maybe_unused]] Label* label) override { UNIMPLEMENTED(FATAL) << "TODO: Support branches."; } - void Jump(Label* label ATTRIBUTE_UNUSED) override { + void Jump([[maybe_unused]] Label* label) override { UNIMPLEMENTED(FATAL) << "Do not use Jump for RISCV64"; } diff --git a/compiler/utils/stack_checks.h b/compiler/utils/stack_checks.h index d0fff73df3..1be4532f3e 100644 --- a/compiler/utils/stack_checks.h +++ b/compiler/utils/stack_checks.h @@ -35,7 +35,7 @@ static constexpr size_t kSmallFrameSize = 1 * KB; // stack overflow check on method entry. // // A frame is considered large when it's above kLargeFrameSize. 
-static inline bool FrameNeedsStackCheck(size_t size, InstructionSet isa ATTRIBUTE_UNUSED) { +static inline bool FrameNeedsStackCheck(size_t size, [[maybe_unused]] InstructionSet isa) { return size >= kLargeFrameSize; } diff --git a/compiler/utils/x86/jni_macro_assembler_x86.cc b/compiler/utils/x86/jni_macro_assembler_x86.cc index 154e50b4e4..dfdbc183f1 100644 --- a/compiler/utils/x86/jni_macro_assembler_x86.cc +++ b/compiler/utils/x86/jni_macro_assembler_x86.cc @@ -83,7 +83,7 @@ void X86JNIMacroAssembler::BuildFrame(size_t frame_size, void X86JNIMacroAssembler::RemoveFrame(size_t frame_size, ArrayRef<const ManagedRegister> spill_regs, - bool may_suspend ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool may_suspend) { CHECK_ALIGNED(frame_size, kNativeStackAlignment); cfi().RememberState(); // -kFramePointerSize for ArtMethod*. diff --git a/compiler/utils/x86_64/assembler_x86_64_test.cc b/compiler/utils/x86_64/assembler_x86_64_test.cc index a7c206afaa..5f7845f02c 100644 --- a/compiler/utils/x86_64/assembler_x86_64_test.cc +++ b/compiler/utils/x86_64/assembler_x86_64_test.cc @@ -2135,7 +2135,7 @@ TEST_F(AssemblerX86_64Test, Psrldq) { "psrldq $2, %xmm15\n", "psrldqi"); } -std::string x87_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, +std::string x87_fn([[maybe_unused]] AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { std::ostringstream str; @@ -2202,7 +2202,7 @@ TEST_F(AssemblerX86_64Test, RetImm) { "ret ${imm}", /*non-negative*/ true), "ret"); } -std::string ret_and_leave_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, +std::string ret_and_leave_fn([[maybe_unused]] AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { std::ostringstream str; @@ -2513,7 +2513,7 @@ static x86_64::X86_64ManagedRegister ManagedFromFpu(x86_64::FloatRegister r) { return x86_64::X86_64ManagedRegister::FromXmmRegister(r); } -std::string buildframe_test_fn(JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, +std::string buildframe_test_fn([[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test, x86_64::X86_64JNIMacroAssembler* assembler) { // TODO: more interesting spill registers / entry spills. @@ -2556,7 +2556,7 @@ TEST_F(JNIMacroAssemblerX86_64Test, BuildFrame) { DriverFn(&buildframe_test_fn, "BuildFrame"); } -std::string removeframe_test_fn(JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, +std::string removeframe_test_fn([[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test, x86_64::X86_64JNIMacroAssembler* assembler) { // TODO: more interesting spill registers / entry spills. 
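The assembler test helpers are plain functions invoked through one common driver signature, so an argument a particular helper does not need is annotated rather than dropped. A compact sketch under that assumption, with a hypothetical driver and helper rather than the real test harness:

#include <iostream>
#include <string>

struct TestFixture {};  // stand-in for the test's Base class

// Every helper is called through the same two-argument signature.
using TestFn = std::string (*)(TestFixture*, int);

// This particular helper never touches the fixture, so the argument is
// annotated rather than removed, keeping the callback signature uniform.
std::string format_value_fn([[maybe_unused]] TestFixture* fixture, int value) {
  return "value=" + std::to_string(value);
}

void Drive(TestFn fn, TestFixture* fixture, int value) {
  std::cout << fn(fixture, value) << '\n';
}

int main() {
  TestFixture fixture;
  Drive(format_value_fn, &fixture, 7);  // prints value=7
  return 0;
}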
@@ -2588,7 +2588,7 @@ TEST_F(JNIMacroAssemblerX86_64Test, RemoveFrame) { } std::string increaseframe_test_fn( - JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test, x86_64::X86_64JNIMacroAssembler* assembler) { assembler->IncreaseFrameSize(0U); assembler->IncreaseFrameSize(kStackAlignment); @@ -2608,7 +2608,7 @@ TEST_F(JNIMacroAssemblerX86_64Test, IncreaseFrame) { } std::string decreaseframe_test_fn( - JNIMacroAssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIMacroAssemblerX86_64Test::Base* assembler_test, x86_64::X86_64JNIMacroAssembler* assembler) { assembler->DecreaseFrameSize(0U); assembler->DecreaseFrameSize(kStackAlignment); diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc index 388845730e..e9e6dbdae7 100644 --- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc +++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc @@ -95,7 +95,7 @@ void X86_64JNIMacroAssembler::BuildFrame(size_t frame_size, void X86_64JNIMacroAssembler::RemoveFrame(size_t frame_size, ArrayRef<const ManagedRegister> spill_regs, - bool may_suspend ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool may_suspend) { CHECK_ALIGNED(frame_size, kNativeStackAlignment); cfi().RememberState(); int gpr_count = 0; @@ -515,7 +515,7 @@ void X86_64JNIMacroAssembler::GetCurrentThread(FrameOffset offset) { } void X86_64JNIMacroAssembler::TryToTransitionFromRunnableToNative( - JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs ATTRIBUTE_UNUSED) { + JNIMacroLabel* label, [[maybe_unused]] ArrayRef<const ManagedRegister> scratch_regs) { constexpr uint32_t kNativeStateValue = Thread::StoredThreadStateValue(ThreadState::kNative); constexpr uint32_t kRunnableStateValue = Thread::StoredThreadStateValue(ThreadState::kRunnable); constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kX86_64PointerSize>(); diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc index db7e55a28c..be442075e4 100644 --- a/dex2oat/dex2oat_test.cc +++ b/dex2oat/dex2oat_test.cc @@ -207,7 +207,7 @@ class Dex2oatTest : public Dex2oatEnvironmentTest { // to what's already huge test methods). class Dex2oatWithExpectedFilterTest : public Dex2oatTest { protected: - void CheckFilter(CompilerFilter::Filter expected ATTRIBUTE_UNUSED, + void CheckFilter([[maybe_unused]] CompilerFilter::Filter expected, CompilerFilter::Filter actual) override { EXPECT_EQ(expected_filter_, actual); } @@ -251,7 +251,7 @@ class Dex2oatSwapTest : public Dex2oatTest { } } - virtual void CheckTargetResult(bool expect_use ATTRIBUTE_UNUSED) { + virtual void CheckTargetResult([[maybe_unused]] bool expect_use) { // TODO: Ignore for now, as we won't capture any output (it goes to the logcat). We may do // something for variants with file descriptor where we can control the lifetime of // the swap file and thus take a look at it. @@ -441,8 +441,8 @@ TEST_F(Dex2oatSwapUseTest, CheckSwapUsage) { class Dex2oatVeryLargeTest : public Dex2oatTest { protected: - void CheckFilter(CompilerFilter::Filter input ATTRIBUTE_UNUSED, - CompilerFilter::Filter result ATTRIBUTE_UNUSED) override { + void CheckFilter([[maybe_unused]] CompilerFilter::Filter input, + [[maybe_unused]] CompilerFilter::Filter result) override { // Ignore, we'll do our own checks. 
} @@ -537,7 +537,7 @@ class Dex2oatVeryLargeTest : public Dex2oatTest { } } - void CheckTargetResult(bool expect_downgrade ATTRIBUTE_UNUSED) { + void CheckTargetResult([[maybe_unused]] bool expect_downgrade) { // TODO: Ignore for now. May do something for fd things. } @@ -591,8 +591,8 @@ TEST_F(Dex2oatVeryLargeTest, SpeedProfileNoProfile) { class Dex2oatLayoutTest : public Dex2oatTest { protected: - void CheckFilter(CompilerFilter::Filter input ATTRIBUTE_UNUSED, - CompilerFilter::Filter result ATTRIBUTE_UNUSED) override { + void CheckFilter([[maybe_unused]] CompilerFilter::Filter input, + [[maybe_unused]] CompilerFilter::Filter result) override { // Ignore, we'll do our own checks. } diff --git a/dex2oat/driver/compiler_driver.cc b/dex2oat/driver/compiler_driver.cc index df7835d87e..a963fcfe92 100644 --- a/dex2oat/driver/compiler_driver.cc +++ b/dex2oat/driver/compiler_driver.cc @@ -274,12 +274,12 @@ CompilerDriver::CompilerDriver( } CompilerDriver::~CompilerDriver() { - compiled_methods_.Visit([this](const DexFileReference& ref ATTRIBUTE_UNUSED, - CompiledMethod* method) { - if (method != nullptr) { - CompiledMethod::ReleaseSwapAllocatedCompiledMethod(GetCompiledMethodStorage(), method); - } - }); + compiled_methods_.Visit( + [this]([[maybe_unused]] const DexFileReference& ref, CompiledMethod* method) { + if (method != nullptr) { + CompiledMethod::ReleaseSwapAllocatedCompiledMethod(GetCompiledMethodStorage(), method); + } + }); } @@ -459,17 +459,16 @@ static void CompileMethodQuick( const DexFile& dex_file, Handle<mirror::DexCache> dex_cache, ProfileCompilationInfo::ProfileIndexType profile_index) { - auto quick_fn = [profile_index]( - Thread* self ATTRIBUTE_UNUSED, - CompilerDriver* driver, - const dex::CodeItem* code_item, - uint32_t access_flags, - InvokeType invoke_type, - uint16_t class_def_idx, - uint32_t method_idx, - Handle<mirror::ClassLoader> class_loader, - const DexFile& dex_file, - Handle<mirror::DexCache> dex_cache) { + auto quick_fn = [profile_index]([[maybe_unused]] Thread* self, + CompilerDriver* driver, + const dex::CodeItem* code_item, + uint32_t access_flags, + InvokeType invoke_type, + uint16_t class_def_idx, + uint32_t method_idx, + Handle<mirror::ClassLoader> class_loader, + const DexFile& dex_file, + Handle<mirror::DexCache> dex_cache) { DCHECK(driver != nullptr); const VerificationResults* results = driver->GetVerificationResults(); DCHECK(results != nullptr); @@ -761,7 +760,7 @@ static void EnsureVerifiedOrVerifyAtRuntime(jobject jclass_loader, } } -void CompilerDriver::PrepareDexFilesForOatFile(TimingLogger* timings ATTRIBUTE_UNUSED) { +void CompilerDriver::PrepareDexFilesForOatFile([[maybe_unused]] TimingLogger* timings) { compiled_classes_.AddDexFiles(GetCompilerOptions().GetDexFilesForOatFile()); } @@ -1231,8 +1230,7 @@ class ClinitImageUpdate { // Visitor for VisitReferences. void operator()(ObjPtr<mirror::Object> object, MemberOffset field_offset, - bool is_static ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset); if (ref != nullptr) { VisitClinitClassesObject(ref); @@ -1240,13 +1238,13 @@ class ClinitImageUpdate { } // java.lang.ref.Reference visitor for VisitReferences. 
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const {} + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, + [[maybe_unused]] ObjPtr<mirror::Reference> ref) const {} // Ignore class native roots. - void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const {} - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} void Walk() REQUIRES_SHARED(Locks::mutator_lock_) { // Find all the already-marked classes. diff --git a/dex2oat/linker/code_info_table_deduper.cc b/dex2oat/linker/code_info_table_deduper.cc index eff0292e87..e50f2a1fc2 100644 --- a/dex2oat/linker/code_info_table_deduper.cc +++ b/dex2oat/linker/code_info_table_deduper.cc @@ -79,7 +79,7 @@ size_t CodeInfoTableDeduper::Dedupe(const uint8_t* code_info_data) { // Insert entries for large tables to the `dedupe_set_` and check for duplicates. std::array<DedupeSetEntry*, kNumBitTables> dedupe_entries; std::fill(dedupe_entries.begin(), dedupe_entries.end(), nullptr); - CodeInfo::ForEachBitTableField([&](size_t i, auto member_pointer ATTRIBUTE_UNUSED) { + CodeInfo::ForEachBitTableField([&](size_t i, [[maybe_unused]] auto member_pointer) { if (LIKELY(code_info.HasBitTable(i))) { uint32_t table_bit_size = bit_table_bit_starts[i + 1u] - bit_table_bit_starts[i]; if (table_bit_size >= kMinDedupSize) { @@ -109,7 +109,7 @@ size_t CodeInfoTableDeduper::Dedupe(const uint8_t* code_info_data) { }); writer_.WriteInterleavedVarints(header); // Write bit tables and update offsets in `dedupe_set_` after encoding the `header`. 
- CodeInfo::ForEachBitTableField([&](size_t i, auto member_pointer ATTRIBUTE_UNUSED) { + CodeInfo::ForEachBitTableField([&](size_t i, [[maybe_unused]] auto member_pointer) { if (code_info.HasBitTable(i)) { size_t current_bit_offset = writer_.NumberOfWrittenBits(); if (code_info.IsBitTableDeduped(i)) { diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc index 7e30541e0e..b464f0a8d1 100644 --- a/dex2oat/linker/image_writer.cc +++ b/dex2oat/linker/image_writer.cc @@ -705,16 +705,15 @@ class ImageWriter::PruneObjectReferenceVisitor { : image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {} ALWAYS_INLINE void VisitRootIfNonNull( - mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { } + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const + REQUIRES_SHARED(Locks::mutator_lock_) {} - ALWAYS_INLINE void VisitRoot( - mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { } + ALWAYS_INLINE void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) + const REQUIRES_SHARED(Locks::mutator_lock_) {} - ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj, - MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const + ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj, + MemberOffset offset, + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset); @@ -747,8 +746,8 @@ class ImageWriter::PruneObjectReferenceVisitor { } } - ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref) const + ALWAYS_INLINE void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, + ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) { operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false); } @@ -1581,10 +1580,9 @@ class ImageWriter::LayoutHelper::CollectStringReferenceVisitor { } // Collects info for managed fields that reference managed Strings. - void operator() (ObjPtr<mirror::Object> obj, - MemberOffset member_offset, - bool is_static ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { + void operator()(ObjPtr<mirror::Object> obj, + MemberOffset member_offset, + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { ObjPtr<mirror::Object> referred_obj = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(member_offset); @@ -1595,8 +1593,7 @@ class ImageWriter::LayoutHelper::CollectStringReferenceVisitor { } ALWAYS_INLINE - void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref) const + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) { operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false); } @@ -1614,25 +1611,25 @@ class ImageWriter::LayoutHelper::VisitReferencesVisitor { : helper_(helper), oat_index_(oat_index) {} // We do not visit native roots. These are handled with other logic. 
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const { + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const { LOG(FATAL) << "UNREACHABLE"; } - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const { + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const { LOG(FATAL) << "UNREACHABLE"; } ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset); VisitReference(ref); } - ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref) const + ALWAYS_INLINE void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, + ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) { operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false); } @@ -2759,17 +2756,17 @@ class ImageWriter::FixupRootVisitor : public RootVisitor { explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) { } - void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED, - const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void VisitRoots([[maybe_unused]] mirror::Object*** roots, + [[maybe_unused]] size_t count, + [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) { LOG(FATAL) << "Unsupported"; } void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) { for (size_t i = 0; i < count; ++i) { // Copy the reference. Since we do not have the address for recording the relocation, // it needs to be recorded explicitly by the user of FixupRootVisitor. @@ -3034,15 +3031,15 @@ class ImageWriter::FixupVisitor { } // We do not visit native roots. These are handled with other logic. - void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const { + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const { LOG(FATAL) << "UNREACHABLE"; } - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const { + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const { LOG(FATAL) << "UNREACHABLE"; } - void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const + void operator()(ObjPtr<Object> obj, MemberOffset offset, [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone, kWithoutReadBarrier>(offset); // Copy the reference and record the fixup if necessary. @@ -3051,8 +3048,7 @@ class ImageWriter::FixupVisitor { } // java.lang.ref.Reference visitor. 
- void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref) const + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false); } @@ -3122,14 +3118,14 @@ class ImageWriter::FixupClassVisitor final : public FixupVisitor { FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {} - void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const + void operator()(ObjPtr<Object> obj, MemberOffset offset, [[maybe_unused]] bool is_static) const REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { DCHECK(obj->IsClass()); FixupVisitor::operator()(obj, offset, /*is_static*/false); } - void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, + [[maybe_unused]] ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { LOG(FATAL) << "Reference not expected here."; } diff --git a/dex2oat/linker/multi_oat_relative_patcher_test.cc b/dex2oat/linker/multi_oat_relative_patcher_test.cc index a393eb8b1e..c3133b6ec0 100644 --- a/dex2oat/linker/multi_oat_relative_patcher_test.cc +++ b/dex2oat/linker/multi_oat_relative_patcher_test.cc @@ -34,7 +34,7 @@ class MultiOatRelativePatcherTest : public testing::Test { MockPatcher() { } uint32_t ReserveSpace(uint32_t offset, - const CompiledMethod* compiled_method ATTRIBUTE_UNUSED, + [[maybe_unused]] const CompiledMethod* compiled_method, MethodReference method_ref) override { last_reserve_offset_ = offset; last_reserve_method_ = method_ref; @@ -76,7 +76,7 @@ class MultiOatRelativePatcherTest : public testing::Test { return offset; } - void PatchCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, + void PatchCall([[maybe_unused]] std::vector<uint8_t>* code, uint32_t literal_offset, uint32_t patch_offset, uint32_t target_offset) override { @@ -85,7 +85,7 @@ class MultiOatRelativePatcherTest : public testing::Test { last_target_offset_ = target_offset; } - void PatchPcRelativeReference(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, + void PatchPcRelativeReference([[maybe_unused]] std::vector<uint8_t>* code, const LinkerPatch& patch, uint32_t patch_offset, uint32_t target_offset) override { @@ -94,20 +94,20 @@ class MultiOatRelativePatcherTest : public testing::Test { last_target_offset_ = target_offset; } - void PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) override { + void PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) override { LOG(FATAL) << "UNIMPLEMENTED"; } - void PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) override { + void PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) override { LOG(FATAL) << "UNIMPLEMENTED"; } std::vector<debug::MethodDebugInfo> GenerateThunkDebugInfo( - uint32_t executable_offset ATTRIBUTE_UNUSED) override { + [[maybe_unused]] uint32_t 
executable_offset) override { LOG(FATAL) << "UNIMPLEMENTED"; UNREACHABLE(); } diff --git a/dex2oat/linker/oat_writer.cc b/dex2oat/linker/oat_writer.cc index 222a5f4a5f..413d71f7d5 100644 --- a/dex2oat/linker/oat_writer.cc +++ b/dex2oat/linker/oat_writer.cc @@ -765,7 +765,7 @@ class OatWriter::InitBssLayoutMethodVisitor : public DexMethodVisitor { explicit InitBssLayoutMethodVisitor(OatWriter* writer) : DexMethodVisitor(writer, /* offset */ 0u) {} - bool VisitMethod(size_t class_def_method_index ATTRIBUTE_UNUSED, + bool VisitMethod([[maybe_unused]] size_t class_def_method_index, const ClassAccessor::Method& method) override { // Look for patches with .bss references and prepare maps with placeholders for their offsets. CompiledMethod* compiled_method = writer_->compiler_driver_->GetCompiledMethod( @@ -859,7 +859,7 @@ class OatWriter::InitOatClassesMethodVisitor : public DexMethodVisitor { return true; } - bool VisitMethod(size_t class_def_method_index ATTRIBUTE_UNUSED, + bool VisitMethod([[maybe_unused]] size_t class_def_method_index, const ClassAccessor::Method& method) override { // Fill in the compiled_methods_ array for methods that have a // CompiledMethod. We track the number of non-null entries in @@ -1396,8 +1396,8 @@ class OatWriter::InitMapMethodVisitor : public OatDexMethodVisitor { } bool VisitMethod(size_t class_def_method_index, - const ClassAccessor::Method& method ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] const ClassAccessor::Method& method) override + REQUIRES_SHARED(Locks::mutator_lock_) { OatClass* oat_class = &writer_->oat_classes_[oat_class_index_]; CompiledMethod* compiled_method = oat_class->GetCompiledMethod(class_def_method_index); @@ -2504,7 +2504,7 @@ bool OatWriter::WriteRodata(OutputStream* out) { return true; } -void OatWriter::WriteQuickeningInfo(/*out*/std::vector<uint8_t>* ATTRIBUTE_UNUSED) { +void OatWriter::WriteQuickeningInfo([[maybe_unused]] /*out*/ std::vector<uint8_t>*) { // Nothing to write. Leave `vdex_size_` untouched and unaligned. vdex_quickening_info_offset_ = vdex_size_; size_quickening_info_alignment_ = 0; diff --git a/dex2oat/linker/relative_patcher.cc b/dex2oat/linker/relative_patcher.cc index 40acb0bc6a..1c04812bf1 100644 --- a/dex2oat/linker/relative_patcher.cc +++ b/dex2oat/linker/relative_patcher.cc @@ -44,8 +44,8 @@ std::unique_ptr<RelativePatcher> RelativePatcher::Create( RelativePatcherNone() { } uint32_t ReserveSpace(uint32_t offset, - const CompiledMethod* compiled_method ATTRIBUTE_UNUSED, - MethodReference method_ref ATTRIBUTE_UNUSED) override { + [[maybe_unused]] const CompiledMethod* compiled_method, + [[maybe_unused]] MethodReference method_ref) override { return offset; // No space reserved; no patches expected. } @@ -53,38 +53,38 @@ std::unique_ptr<RelativePatcher> RelativePatcher::Create( return offset; // No space reserved; no patches expected. } - uint32_t WriteThunks(OutputStream* out ATTRIBUTE_UNUSED, uint32_t offset) override { + uint32_t WriteThunks([[maybe_unused]] OutputStream* out, uint32_t offset) override { return offset; // No thunks added; no patches expected. 
} - void PatchCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - uint32_t literal_offset ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED, - uint32_t target_offset ATTRIBUTE_UNUSED) override { + void PatchCall([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] uint32_t literal_offset, + [[maybe_unused]] uint32_t patch_offset, + [[maybe_unused]] uint32_t target_offset) override { LOG(FATAL) << "Unexpected relative call patch."; } - void PatchPcRelativeReference(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED, - uint32_t target_offset ATTRIBUTE_UNUSED) override { + void PatchPcRelativeReference([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset, + [[maybe_unused]] uint32_t target_offset) override { LOG(FATAL) << "Unexpected relative dex cache array patch."; } - void PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) override { + void PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) override { LOG(FATAL) << "Unexpected entrypoint call patch."; } - void PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) override { + void PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) override { LOG(FATAL) << "Unexpected baker read barrier branch patch."; } std::vector<debug::MethodDebugInfo> GenerateThunkDebugInfo( - uint32_t executable_offset ATTRIBUTE_UNUSED) override { + [[maybe_unused]] uint32_t executable_offset) override { return std::vector<debug::MethodDebugInfo>(); // No thunks added. 
} diff --git a/dex2oat/linker/x86/relative_patcher_x86.cc b/dex2oat/linker/x86/relative_patcher_x86.cc index a4444461a3..5b8cf4768a 100644 --- a/dex2oat/linker/x86/relative_patcher_x86.cc +++ b/dex2oat/linker/x86/relative_patcher_x86.cc @@ -56,15 +56,15 @@ void X86RelativePatcher::PatchPcRelativeReference(std::vector<uint8_t>* code, (*code)[literal_offset + 3u] = static_cast<uint8_t>(diff >> 24); } -void X86RelativePatcher::PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) { +void X86RelativePatcher::PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) { LOG(FATAL) << "UNIMPLEMENTED"; } -void X86RelativePatcher::PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) { +void X86RelativePatcher::PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) { LOG(FATAL) << "UNIMPLEMENTED"; } diff --git a/dex2oat/linker/x86/relative_patcher_x86_base.cc b/dex2oat/linker/x86/relative_patcher_x86_base.cc index 07cd724308..1104b8a6b3 100644 --- a/dex2oat/linker/x86/relative_patcher_x86_base.cc +++ b/dex2oat/linker/x86/relative_patcher_x86_base.cc @@ -23,8 +23,8 @@ namespace linker { uint32_t X86BaseRelativePatcher::ReserveSpace( uint32_t offset, - const CompiledMethod* compiled_method ATTRIBUTE_UNUSED, - MethodReference method_ref ATTRIBUTE_UNUSED) { + [[maybe_unused]] const CompiledMethod* compiled_method, + [[maybe_unused]] MethodReference method_ref) { return offset; // No space reserved; no limit on relative call distance. } @@ -32,12 +32,12 @@ uint32_t X86BaseRelativePatcher::ReserveSpaceEnd(uint32_t offset) { return offset; // No space reserved; no limit on relative call distance. } -uint32_t X86BaseRelativePatcher::WriteThunks(OutputStream* out ATTRIBUTE_UNUSED, uint32_t offset) { +uint32_t X86BaseRelativePatcher::WriteThunks([[maybe_unused]] OutputStream* out, uint32_t offset) { return offset; // No thunks added; no limit on relative call distance. } std::vector<debug::MethodDebugInfo> X86BaseRelativePatcher::GenerateThunkDebugInfo( - uint32_t executable_offset ATTRIBUTE_UNUSED) { + [[maybe_unused]] uint32_t executable_offset) { return std::vector<debug::MethodDebugInfo>(); // No thunks added. 
} diff --git a/dex2oat/linker/x86_64/relative_patcher_x86_64.cc b/dex2oat/linker/x86_64/relative_patcher_x86_64.cc index 629affcb99..1177417c47 100644 --- a/dex2oat/linker/x86_64/relative_patcher_x86_64.cc +++ b/dex2oat/linker/x86_64/relative_patcher_x86_64.cc @@ -34,15 +34,15 @@ void X86_64RelativePatcher::PatchPcRelativeReference(std::vector<uint8_t>* code, reinterpret_cast<unaligned_int32_t*>(&(*code)[patch.LiteralOffset()])[0] = displacement; } -void X86_64RelativePatcher::PatchEntrypointCall(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) { +void X86_64RelativePatcher::PatchEntrypointCall([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) { LOG(FATAL) << "UNIMPLEMENTED"; } -void X86_64RelativePatcher::PatchBakerReadBarrierBranch(std::vector<uint8_t>* code ATTRIBUTE_UNUSED, - const LinkerPatch& patch ATTRIBUTE_UNUSED, - uint32_t patch_offset ATTRIBUTE_UNUSED) { +void X86_64RelativePatcher::PatchBakerReadBarrierBranch([[maybe_unused]] std::vector<uint8_t>* code, + [[maybe_unused]] const LinkerPatch& patch, + [[maybe_unused]] uint32_t patch_offset) { LOG(FATAL) << "UNIMPLEMENTED"; } diff --git a/dex2oat/utils/swap_space.h b/dex2oat/utils/swap_space.h index aba6485c81..e4895ac017 100644 --- a/dex2oat/utils/swap_space.h +++ b/dex2oat/utils/swap_space.h @@ -176,7 +176,7 @@ class SwapAllocator { pointer address(reference x) const { return &x; } const_pointer address(const_reference x) const { return &x; } - pointer allocate(size_type n, SwapAllocator<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) { + pointer allocate(size_type n, [[maybe_unused]] SwapAllocator<void>::pointer hint = nullptr) { DCHECK_LE(n, max_size()); if (swap_space_ == nullptr) { T* result = reinterpret_cast<T*>(malloc(n * sizeof(T))); diff --git a/dex2oat/verifier_deps_test.cc b/dex2oat/verifier_deps_test.cc index bb6120004b..0b72382b21 100644 --- a/dex2oat/verifier_deps_test.cc +++ b/dex2oat/verifier_deps_test.cc @@ -46,9 +46,9 @@ class VerifierDepsCompilerCallbacks : public CompilerCallbacks { : CompilerCallbacks(CompilerCallbacks::CallbackMode::kCompileApp), deps_(nullptr) {} - void AddUncompilableMethod(MethodReference ref ATTRIBUTE_UNUSED) override {} - void AddUncompilableClass(ClassReference ref ATTRIBUTE_UNUSED) override {} - void ClassRejected(ClassReference ref ATTRIBUTE_UNUSED) override {} + void AddUncompilableMethod([[maybe_unused]] MethodReference ref) override {} + void AddUncompilableClass([[maybe_unused]] ClassReference ref) override {} + void ClassRejected([[maybe_unused]] ClassReference ref) override {} verifier::VerifierDeps* GetVerifierDeps() const override { return deps_; } void SetVerifierDeps(verifier::VerifierDeps* deps) override { deps_ = deps; } diff --git a/dexlayout/dex_ir.cc b/dexlayout/dex_ir.cc index 3917847ea7..82c03890fa 100644 --- a/dexlayout/dex_ir.cc +++ b/dexlayout/dex_ir.cc @@ -30,11 +30,9 @@ namespace art { namespace dex_ir { -static uint32_t HeaderOffset(const dex_ir::Header* header ATTRIBUTE_UNUSED) { - return 0; -} +static uint32_t HeaderOffset([[maybe_unused]] const dex_ir::Header* header) { return 0; } -static uint32_t HeaderSize(const dex_ir::Header* header ATTRIBUTE_UNUSED) { +static uint32_t HeaderSize([[maybe_unused]] const dex_ir::Header* header) { // Size is in elements, so there is only one header. 
return 1; } diff --git a/dexlist/dexlist.cc b/dexlist/dexlist.cc index 3603675a70..553e364c5e 100644 --- a/dexlist/dexlist.cc +++ b/dexlist/dexlist.cc @@ -84,8 +84,11 @@ static std::unique_ptr<char[]> descriptorToDot(const char* str) { * Dumps a method. */ static void dumpMethod(const DexFile* pDexFile, - const char* fileName, u4 idx, u4 flags ATTRIBUTE_UNUSED, - const dex::CodeItem* pCode, u4 codeOffset) { + const char* fileName, + u4 idx, + [[maybe_unused]] u4 flags, + const dex::CodeItem* pCode, + u4 codeOffset) { // Abstract and native methods don't get listed. if (pCode == nullptr || codeOffset == 0) { return; diff --git a/dt_fd_forward/dt_fd_forward.cc b/dt_fd_forward/dt_fd_forward.cc index 0ff87706b9..b46810ffea 100644 --- a/dt_fd_forward/dt_fd_forward.cc +++ b/dt_fd_forward/dt_fd_forward.cc @@ -691,7 +691,7 @@ static jdwpTransportError ParseAddress(const std::string& addr, class JdwpTransportFunctions { public: - static jdwpTransportError GetCapabilities(jdwpTransportEnv* env ATTRIBUTE_UNUSED, + static jdwpTransportError GetCapabilities([[maybe_unused]] jdwpTransportEnv* env, /*out*/ JDWPTransportCapabilities* capabilities_ptr) { // We don't support any of the optional capabilities (can_timeout_attach, can_timeout_accept, // can_timeout_handshake) so just return a zeroed capabilities ptr. @@ -703,8 +703,8 @@ class JdwpTransportFunctions { // Address is <sock_fd> static jdwpTransportError Attach(jdwpTransportEnv* env, const char* address, - jlong attach_timeout ATTRIBUTE_UNUSED, - jlong handshake_timeout ATTRIBUTE_UNUSED) { + [[maybe_unused]] jlong attach_timeout, + [[maybe_unused]] jlong handshake_timeout) { if (address == nullptr || *address == '\0') { return ERR(ILLEGAL_ARGUMENT); } @@ -743,8 +743,8 @@ class JdwpTransportFunctions { } static jdwpTransportError Accept(jdwpTransportEnv* env, - jlong accept_timeout ATTRIBUTE_UNUSED, - jlong handshake_timeout ATTRIBUTE_UNUSED) { + [[maybe_unused]] jlong accept_timeout, + [[maybe_unused]] jlong handshake_timeout) { return AsFdForward(env)->Accept(); } @@ -784,11 +784,10 @@ const jdwpTransportNativeInterface_ gTransportInterface = { JdwpTransportFunctions::GetLastError, }; -extern "C" -JNIEXPORT jint JNICALL jdwpTransport_OnLoad(JavaVM* vm ATTRIBUTE_UNUSED, - jdwpTransportCallback* cb, - jint version, - jdwpTransportEnv** /*out*/env) { +extern "C" JNIEXPORT jint JNICALL jdwpTransport_OnLoad([[maybe_unused]] JavaVM* vm, + jdwpTransportCallback* cb, + jint version, + jdwpTransportEnv** /*out*/ env) { if (version != JDWPTRANSPORT_VERSION_1_0) { LOG(ERROR) << "unknown version " << version; return JNI_EVERSION; diff --git a/imgdiag/imgdiag.cc b/imgdiag/imgdiag.cc index e3310e97f3..26f8a00c28 100644 --- a/imgdiag/imgdiag.cc +++ b/imgdiag/imgdiag.cc @@ -728,7 +728,7 @@ class RegionSpecializedBase<ArtMethod> : public RegionCommon<ArtMethod> { ArrayRef<uint8_t> zygote_contents, const android::procinfo::MapInfo& boot_map, const ImageHeader& image_header, - bool dump_dirty_objects ATTRIBUTE_UNUSED) + [[maybe_unused]] bool dump_dirty_objects) : RegionCommon<ArtMethod>(os, remote_contents, zygote_contents, boot_map, image_header), os_(*os) { // Prepare the table for offset to member lookups. 
@@ -749,12 +749,9 @@ class RegionSpecializedBase<ArtMethod> : public RegionCommon<ArtMethod> { RegionCommon<ArtMethod>::image_header_.VisitPackedArtMethods(*visitor, base, pointer_size); } - void VisitEntry(ArtMethod* method ATTRIBUTE_UNUSED) - REQUIRES_SHARED(Locks::mutator_lock_) { - } + void VisitEntry([[maybe_unused]] ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {} - void AddCleanEntry(ArtMethod* method ATTRIBUTE_UNUSED) { - } + void AddCleanEntry([[maybe_unused]] ArtMethod* method) {} void AddFalseDirtyEntry(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) { @@ -781,8 +778,8 @@ class RegionSpecializedBase<ArtMethod> : public RegionCommon<ArtMethod> { void DiffEntryContents(ArtMethod* method, uint8_t* remote_bytes, const uint8_t* base_ptr, - bool log_dirty_objects ATTRIBUTE_UNUSED, - size_t entry_offset ATTRIBUTE_UNUSED) + [[maybe_unused]] bool log_dirty_objects, + [[maybe_unused]] size_t entry_offset) REQUIRES_SHARED(Locks::mutator_lock_) { const char* tabs = " "; os_ << tabs << "ArtMethod " << ArtMethod::PrettyMethod(method) << "\n"; diff --git a/libartbase/base/allocator.cc b/libartbase/base/allocator.cc index 6393672859..71a4f6cce4 100644 --- a/libartbase/base/allocator.cc +++ b/libartbase/base/allocator.cc @@ -49,12 +49,12 @@ class NoopAllocator final : public Allocator { NoopAllocator() {} ~NoopAllocator() {} - void* Alloc(size_t size ATTRIBUTE_UNUSED) override { + void* Alloc([[maybe_unused]] size_t size) override { LOG(FATAL) << "NoopAllocator::Alloc should not be called"; UNREACHABLE(); } - void Free(void* p ATTRIBUTE_UNUSED) override { + void Free([[maybe_unused]] void* p) override { // Noop. } diff --git a/libartbase/base/allocator.h b/libartbase/base/allocator.h index 81f3a60815..24374a2b40 100644 --- a/libartbase/base/allocator.h +++ b/libartbase/base/allocator.h @@ -115,8 +115,8 @@ class TrackingAllocatorImpl : public std::allocator<T> { // Used internally by STL data structures. template <class U> - TrackingAllocatorImpl( - const TrackingAllocatorImpl<U, kTag>& alloc ATTRIBUTE_UNUSED) noexcept {} + explicit TrackingAllocatorImpl( + [[maybe_unused]] const TrackingAllocatorImpl<U, kTag>& alloc) noexcept {} // Used internally by STL data structures. 
TrackingAllocatorImpl() noexcept { @@ -130,7 +130,7 @@ class TrackingAllocatorImpl : public std::allocator<T> { using other = TrackingAllocatorImpl<U, kTag>; }; - pointer allocate(size_type n, const_pointer hint ATTRIBUTE_UNUSED = 0) { + pointer allocate(size_type n, [[maybe_unused]] const_pointer hint = 0) { const size_t size = n * sizeof(T); TrackedAllocators::RegisterAllocation(GetTag(), size); return reinterpret_cast<pointer>(malloc(size)); diff --git a/libartbase/base/arena_allocator.h b/libartbase/base/arena_allocator.h index c4f713a1bf..10f7f3183d 100644 --- a/libartbase/base/arena_allocator.h +++ b/libartbase/base/arena_allocator.h @@ -120,13 +120,13 @@ class ArenaAllocatorStatsImpl<false> { ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default; ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete; - void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {} - void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {} + void Copy([[maybe_unused]] const ArenaAllocatorStatsImpl& other) {} + void RecordAlloc([[maybe_unused]] size_t bytes, [[maybe_unused]] ArenaAllocKind kind) {} size_t NumAllocations() const { return 0u; } size_t BytesAllocated() const { return 0u; } - void Dump(std::ostream& os ATTRIBUTE_UNUSED, - const Arena* first ATTRIBUTE_UNUSED, - ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {} + void Dump([[maybe_unused]] std::ostream& os, + [[maybe_unused]] const Arena* first, + [[maybe_unused]] ssize_t lost_bytes_adjustment) const {} }; template <bool kCount> diff --git a/libartbase/base/arena_bit_vector.cc b/libartbase/base/arena_bit_vector.cc index 138a5df77f..e7acb60379 100644 --- a/libartbase/base/arena_bit_vector.cc +++ b/libartbase/base/arena_bit_vector.cc @@ -28,7 +28,7 @@ template <> class ArenaBitVectorAllocatorKindImpl<false> { public: // Not tracking allocations, ignore the supplied kind and arbitrarily provide kArenaAllocSTL. - explicit ArenaBitVectorAllocatorKindImpl(ArenaAllocKind kind ATTRIBUTE_UNUSED) {} + explicit ArenaBitVectorAllocatorKindImpl([[maybe_unused]] ArenaAllocKind kind) {} ArenaBitVectorAllocatorKindImpl(const ArenaBitVectorAllocatorKindImpl&) = default; ArenaBitVectorAllocatorKindImpl& operator=(const ArenaBitVectorAllocatorKindImpl&) = default; ArenaAllocKind Kind() { return kArenaAllocGrowableBitMap; } diff --git a/libartbase/base/arena_containers.h b/libartbase/base/arena_containers.h index f205bc44a3..db1d0405b9 100644 --- a/libartbase/base/arena_containers.h +++ b/libartbase/base/arena_containers.h @@ -109,7 +109,7 @@ template <> class ArenaAllocatorAdapterKindImpl<false> { public: // Not tracking allocations, ignore the supplied kind and arbitrarily provide kArenaAllocSTL. 
- explicit ArenaAllocatorAdapterKindImpl(ArenaAllocKind kind ATTRIBUTE_UNUSED) {} + explicit ArenaAllocatorAdapterKindImpl([[maybe_unused]] ArenaAllocKind kind) {} ArenaAllocatorAdapterKindImpl(const ArenaAllocatorAdapterKindImpl&) = default; ArenaAllocatorAdapterKindImpl& operator=(const ArenaAllocatorAdapterKindImpl&) = default; ArenaAllocKind Kind() { return kArenaAllocSTL; } @@ -199,7 +199,7 @@ class ArenaAllocatorAdapter : private ArenaAllocatorAdapterKind { const_pointer address(const_reference x) const { return &x; } pointer allocate(size_type n, - ArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) { + [[maybe_unused]] ArenaAllocatorAdapter<void>::pointer hint = nullptr) { DCHECK_LE(n, max_size()); return allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind()); } diff --git a/libartbase/base/bit_memory_region.h b/libartbase/base/bit_memory_region.h index baac2f50ea..f3dbd63d17 100644 --- a/libartbase/base/bit_memory_region.h +++ b/libartbase/base/bit_memory_region.h @@ -157,11 +157,11 @@ class BitMemoryRegion final : public ValueObject { ALWAYS_INLINE void CopyBits(const BitMemoryRegion& src) { DCHECK_EQ(size_in_bits(), src.size_in_bits()); // Hopefully, the loads of the unused `value` shall be optimized away. - VisitChunks( - [this, &src](size_t offset, size_t num_bits, size_t value ATTRIBUTE_UNUSED) ALWAYS_INLINE { - StoreChunk(offset, src.LoadBits(offset, num_bits), num_bits); - return true; - }); + VisitChunks([this, &src](size_t offset, size_t num_bits, [[maybe_unused]] size_t value) + ALWAYS_INLINE { + StoreChunk(offset, src.LoadBits(offset, num_bits), num_bits); + return true; + }); } // And bits from other bit region. @@ -194,9 +194,8 @@ class BitMemoryRegion final : public ValueObject { // Count the number of set bits within this region. ALWAYS_INLINE size_t PopCount() const { size_t result = 0u; - VisitChunks([&](size_t offset ATTRIBUTE_UNUSED, - size_t num_bits ATTRIBUTE_UNUSED, - size_t value) ALWAYS_INLINE { + VisitChunks([&]([[maybe_unused]] size_t offset, [[maybe_unused]] size_t num_bits, size_t value) + ALWAYS_INLINE { result += POPCOUNT(value); return true; }); @@ -210,11 +209,9 @@ class BitMemoryRegion final : public ValueObject { // Check if this region has all bits clear. ALWAYS_INLINE bool HasAllBitsClear() const { - return VisitChunks([](size_t offset ATTRIBUTE_UNUSED, - size_t num_bits ATTRIBUTE_UNUSED, - size_t value) ALWAYS_INLINE { - return value == 0u; - }); + return VisitChunks( + []([[maybe_unused]] size_t offset, [[maybe_unused]] size_t num_bits, size_t value) + ALWAYS_INLINE { return value == 0u; }); } // Check if this region has any bit set. 
diff --git a/libartbase/base/debug_stack.h b/libartbase/base/debug_stack.h index 4bbaee88f1..f3ee310afc 100644 --- a/libartbase/base/debug_stack.h +++ b/libartbase/base/debug_stack.h @@ -55,7 +55,7 @@ class DebugStackRefCounterImpl<false> { template <> class DebugStackReferenceImpl<false> { public: - explicit DebugStackReferenceImpl(DebugStackRefCounterImpl<false>* counter ATTRIBUTE_UNUSED) {} + explicit DebugStackReferenceImpl([[maybe_unused]] DebugStackRefCounterImpl<false>* counter) {} DebugStackReferenceImpl(const DebugStackReferenceImpl& other) = default; DebugStackReferenceImpl& operator=(const DebugStackReferenceImpl& other) = default; void CheckTop() { } @@ -64,7 +64,7 @@ class DebugStackReferenceImpl<false> { template <> class DebugStackIndirectTopRefImpl<false> { public: - explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<false>* ref ATTRIBUTE_UNUSED) {} + explicit DebugStackIndirectTopRefImpl([[maybe_unused]] DebugStackReferenceImpl<false>* ref) {} DebugStackIndirectTopRefImpl(const DebugStackIndirectTopRefImpl& other) = default; DebugStackIndirectTopRefImpl& operator=(const DebugStackIndirectTopRefImpl& other) = default; void CheckTop() { } diff --git a/libartbase/base/hash_set.h b/libartbase/base/hash_set.h index 3f3c8f2494..fec9440ebe 100644 --- a/libartbase/base/hash_set.h +++ b/libartbase/base/hash_set.h @@ -502,10 +502,10 @@ class HashSet { // Insert an element with hint. // Note: The hint is not very useful for a HashSet<> unless there are many hash conflicts // and in that case the use of HashSet<> itself should be reconsidered. - std::pair<iterator, bool> insert(const_iterator hint ATTRIBUTE_UNUSED, const T& element) { + std::pair<iterator, bool> insert([[maybe_unused]] const_iterator hint, const T& element) { return insert(element); } - std::pair<iterator, bool> insert(const_iterator hint ATTRIBUTE_UNUSED, T&& element) { + std::pair<iterator, bool> insert([[maybe_unused]] const_iterator hint, T&& element) { return insert(std::move(element)); } @@ -710,7 +710,7 @@ class HashSet { if (UNLIKELY(NumBuckets() == 0)) { return 0; } - auto fail_fn = [&](size_t index ATTRIBUTE_UNUSED) ALWAYS_INLINE { return NumBuckets(); }; + auto fail_fn = [&]([[maybe_unused]] size_t index) ALWAYS_INLINE { return NumBuckets(); }; return FindIndexImpl(element, hash, fail_fn); } diff --git a/libartbase/base/intrusive_forward_list.h b/libartbase/base/intrusive_forward_list.h index 2e66f3e218..06dd4079ef 100644 --- a/libartbase/base/intrusive_forward_list.h +++ b/libartbase/base/intrusive_forward_list.h @@ -35,9 +35,9 @@ struct IntrusiveForwardListHook { explicit IntrusiveForwardListHook(const IntrusiveForwardListHook* hook) : next_hook(hook) { } // Allow copyable values but do not copy the hook, it is not part of the value. 
- IntrusiveForwardListHook(const IntrusiveForwardListHook& other ATTRIBUTE_UNUSED) - : next_hook(nullptr) { } - IntrusiveForwardListHook& operator=(const IntrusiveForwardListHook& src ATTRIBUTE_UNUSED) { + explicit IntrusiveForwardListHook([[maybe_unused]] const IntrusiveForwardListHook& other) + : next_hook(nullptr) {} + IntrusiveForwardListHook& operator=([[maybe_unused]] const IntrusiveForwardListHook& src) { return *this; } diff --git a/libartbase/base/intrusive_forward_list_test.cc b/libartbase/base/intrusive_forward_list_test.cc index 595210b754..180c3d6667 100644 --- a/libartbase/base/intrusive_forward_list_test.cc +++ b/libartbase/base/intrusive_forward_list_test.cc @@ -578,7 +578,7 @@ void IntrusiveForwardListTest::Remove() { ref.remove_if(odd); ifl.remove_if(odd); ASSERT_LISTS_EQUAL(ref, ifl); - auto all = [](ValueType value ATTRIBUTE_UNUSED) { return true; }; + auto all = []([[maybe_unused]] ValueType value) { return true; }; ref.remove_if(all); ifl.remove_if(all); ASSERT_LISTS_EQUAL(ref, ifl); diff --git a/libartbase/base/membarrier.cc b/libartbase/base/membarrier.cc index 48f47dff44..243b908b7b 100644 --- a/libartbase/base/membarrier.cc +++ b/libartbase/base/membarrier.cc @@ -75,7 +75,7 @@ int membarrier(MembarrierCommand command) { #else // __NR_membarrier -int membarrier(MembarrierCommand command ATTRIBUTE_UNUSED) { +int membarrier([[maybe_unused]] MembarrierCommand command) { // In principle this could be supported on linux, but Android's prebuilt glibc does not include // the system call number defintions (b/111199492). errno = ENOSYS; diff --git a/libartbase/base/memfd.cc b/libartbase/base/memfd.cc index 8512a3ae13..e96391a01f 100644 --- a/libartbase/base/memfd.cc +++ b/libartbase/base/memfd.cc @@ -68,7 +68,7 @@ int memfd_create(const char* name, unsigned int flags) { #else // __NR_memfd_create -int memfd_create(const char* name ATTRIBUTE_UNUSED, unsigned int flags ATTRIBUTE_UNUSED) { +int memfd_create([[maybe_unused]] const char* name, [[maybe_unused]] unsigned int flags) { errno = ENOSYS; return -1; } diff --git a/libartbase/base/metrics/metrics_test.cc b/libartbase/base/metrics/metrics_test.cc index 2d69c95b69..61dfddd273 100644 --- a/libartbase/base/metrics/metrics_test.cc +++ b/libartbase/base/metrics/metrics_test.cc @@ -272,13 +272,13 @@ TEST_F(MetricsTest, ResetMetrics) { class NonZeroBackend : public TestBackendBase { public: - void ReportCounter(DatumId counter_type [[maybe_unused]], uint64_t value) override { + void ReportCounter([[maybe_unused]] DatumId counter_type, uint64_t value) override { EXPECT_NE(value, 0u); } - void ReportHistogram(DatumId histogram_type [[maybe_unused]], - int64_t minimum_value [[maybe_unused]], - int64_t maximum_value [[maybe_unused]], + void ReportHistogram([[maybe_unused]] DatumId histogram_type, + [[maybe_unused]] int64_t minimum_value, + [[maybe_unused]] int64_t maximum_value, const std::vector<uint32_t>& buckets) override { bool nonzero = false; for (const auto value : buckets) { @@ -296,13 +296,13 @@ TEST_F(MetricsTest, ResetMetrics) { class ZeroBackend : public TestBackendBase { public: - void ReportCounter(DatumId counter_type [[maybe_unused]], uint64_t value) override { + void ReportCounter([[maybe_unused]] DatumId counter_type, uint64_t value) override { EXPECT_EQ(value, 0u); } - void ReportHistogram(DatumId histogram_type [[maybe_unused]], - int64_t minimum_value [[maybe_unused]], - int64_t maximum_value [[maybe_unused]], + void ReportHistogram([[maybe_unused]] DatumId histogram_type, + [[maybe_unused]] int64_t 
minimum_value, + [[maybe_unused]] int64_t maximum_value, const std::vector<uint32_t>& buckets) override { for (const auto value : buckets) { EXPECT_EQ(value, 0u); @@ -323,13 +323,13 @@ TEST_F(MetricsTest, KeepEventMetricsResetValueMetricsAfterReporting) { class FirstBackend : public TestBackendBase { public: - void ReportCounter(DatumId counter_type [[maybe_unused]], uint64_t value) override { + void ReportCounter([[maybe_unused]] DatumId counter_type, uint64_t value) override { EXPECT_NE(value, 0u); } - void ReportHistogram(DatumId histogram_type [[maybe_unused]], - int64_t minimum_value [[maybe_unused]], - int64_t maximum_value [[maybe_unused]], + void ReportHistogram([[maybe_unused]] DatumId histogram_type, + [[maybe_unused]] int64_t minimum_value, + [[maybe_unused]] int64_t maximum_value, const std::vector<uint32_t>& buckets) override { EXPECT_NE(buckets[0], 0u) << "Bucket 0 should have a non-zero value"; for (size_t i = 1; i < buckets.size(); i++) { @@ -368,9 +368,9 @@ TEST_F(MetricsTest, KeepEventMetricsResetValueMetricsAfterReporting) { } // All histograms are event metrics. - void ReportHistogram(DatumId histogram_type [[maybe_unused]], - int64_t minimum_value [[maybe_unused]], - int64_t maximum_value [[maybe_unused]], + void ReportHistogram([[maybe_unused]] DatumId histogram_type, + [[maybe_unused]] int64_t minimum_value, + [[maybe_unused]] int64_t maximum_value, const std::vector<uint32_t>& buckets) override { EXPECT_NE(buckets[0], 0u) << "Bucket 0 should have a non-zero value"; for (size_t i = 1; i < buckets.size(); i++) { diff --git a/libartbase/base/scoped_arena_allocator.h b/libartbase/base/scoped_arena_allocator.h index 6de01921fa..165fb8c6d8 100644 --- a/libartbase/base/scoped_arena_allocator.h +++ b/libartbase/base/scoped_arena_allocator.h @@ -171,7 +171,7 @@ class ScopedArenaAllocator size_t ApproximatePeakBytes(); // Allow a delete-expression to destroy but not deallocate allocators created by Create(). - static void operator delete(void* ptr ATTRIBUTE_UNUSED) {} + static void operator delete([[maybe_unused]] void* ptr) {} private: ArenaStack* arena_stack_; diff --git a/libartbase/base/scoped_arena_containers.h b/libartbase/base/scoped_arena_containers.h index 5f0cfe6ce6..d0ff7f535d 100644 --- a/libartbase/base/scoped_arena_containers.h +++ b/libartbase/base/scoped_arena_containers.h @@ -185,7 +185,7 @@ class ScopedArenaAllocatorAdapter const_pointer address(const_reference x) const { return &x; } pointer allocate(size_type n, - ScopedArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) { + [[maybe_unused]] ScopedArenaAllocatorAdapter<void>::pointer hint = nullptr) { DCHECK_LE(n, max_size()); DebugStackIndirectTopRef::CheckTop(); return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T), @@ -273,7 +273,7 @@ class ArenaDelete { template <typename T> class ArenaDelete<T[]> { public: - void operator()(T* ptr ATTRIBUTE_UNUSED) const { + void operator()([[maybe_unused]] T* ptr) const { static_assert(std::is_trivially_destructible_v<T>, "ArenaUniquePtr does not support non-trivially-destructible arrays."); // TODO: Implement debug checks, and MEMORY_TOOL support. 
diff --git a/libartbase/base/transform_array_ref_test.cc b/libartbase/base/transform_array_ref_test.cc index 4ac697848a..e1d8d52468 100644 --- a/libartbase/base/transform_array_ref_test.cc +++ b/libartbase/base/transform_array_ref_test.cc @@ -30,7 +30,7 @@ struct ValueHolder { int value; }; -ATTRIBUTE_UNUSED bool operator==(const ValueHolder& lhs, const ValueHolder& rhs) { +[[maybe_unused]] bool operator==(const ValueHolder& lhs, const ValueHolder& rhs) { return lhs.value == rhs.value; } diff --git a/libartbase/base/unix_file/random_access_file_test.h b/libartbase/base/unix_file/random_access_file_test.h index 178f89d5a5..0592256291 100644 --- a/libartbase/base/unix_file/random_access_file_test.h +++ b/libartbase/base/unix_file/random_access_file_test.h @@ -171,8 +171,7 @@ class RandomAccessFileTest : public testing::Test { CleanUp(file.get()); } - virtual void CleanUp(RandomAccessFile* file ATTRIBUTE_UNUSED) { - } + virtual void CleanUp([[maybe_unused]] RandomAccessFile* file) {} protected: std::string android_data_; diff --git a/libartbase/base/utils.h b/libartbase/base/utils.h index f311f09975..5e04cb0074 100644 --- a/libartbase/base/utils.h +++ b/libartbase/base/utils.h @@ -75,16 +75,13 @@ void GetTaskStats(pid_t tid, char* state, int* utime, int* stime, int* task_cpu) class VoidFunctor { public: template <typename A> - inline void operator() (A a ATTRIBUTE_UNUSED) const { - } + inline void operator()([[maybe_unused]] A a) const {} template <typename A, typename B> - inline void operator() (A a ATTRIBUTE_UNUSED, B b ATTRIBUTE_UNUSED) const { - } + inline void operator()([[maybe_unused]] A a, [[maybe_unused]] B b) const {} template <typename A, typename B, typename C> - inline void operator() (A a ATTRIBUTE_UNUSED, B b ATTRIBUTE_UNUSED, C c ATTRIBUTE_UNUSED) const { - } + inline void operator()([[maybe_unused]] A a, [[maybe_unused]] B b, [[maybe_unused]] C c) const {} }; inline bool TestBitmap(size_t idx, const uint8_t* bitmap) { diff --git a/libartpalette/system/palette_fake.cc b/libartpalette/system/palette_fake.cc index 743a4db37e..accdc4ceb4 100644 --- a/libartpalette/system/palette_fake.cc +++ b/libartpalette/system/palette_fake.cc @@ -14,15 +14,13 @@ * limitations under the License. */ -#include "palette/palette.h" +#include <android-base/logging.h> +#include <stdbool.h> #include <map> #include <mutex> -#include <stdbool.h> - -#include <android-base/logging.h> -#include <android-base/macros.h> // For ATTRIBUTE_UNUSED +#include "palette/palette.h" #include "palette_system.h" // Methods in version 1 API, corresponding to SDK level 31. 
@@ -61,28 +59,25 @@ palette_status_t PaletteTraceEnabled(/*out*/bool* enabled) { return PALETTE_STATUS_OK; } -palette_status_t PaletteTraceBegin(const char* name ATTRIBUTE_UNUSED) { - return PALETTE_STATUS_OK; -} +palette_status_t PaletteTraceBegin([[maybe_unused]] const char* name) { return PALETTE_STATUS_OK; } palette_status_t PaletteTraceEnd() { return PALETTE_STATUS_OK; } -palette_status_t PaletteTraceIntegerValue(const char* name ATTRIBUTE_UNUSED, - int32_t value ATTRIBUTE_UNUSED) { +palette_status_t PaletteTraceIntegerValue([[maybe_unused]] const char* name, + [[maybe_unused]] int32_t value) { return PALETTE_STATUS_OK; } -palette_status_t PaletteAshmemCreateRegion(const char* name ATTRIBUTE_UNUSED, - size_t size ATTRIBUTE_UNUSED, +palette_status_t PaletteAshmemCreateRegion([[maybe_unused]] const char* name, + [[maybe_unused]] size_t size, int* fd) { *fd = -1; return PALETTE_STATUS_NOT_SUPPORTED; } -palette_status_t PaletteAshmemSetProtRegion(int fd ATTRIBUTE_UNUSED, - int prot ATTRIBUTE_UNUSED) { +palette_status_t PaletteAshmemSetProtRegion([[maybe_unused]] int fd, [[maybe_unused]] int prot) { return PALETTE_STATUS_NOT_SUPPORTED; } @@ -96,25 +91,25 @@ palette_status_t PaletteShouldReportDex2oatCompilation(bool* value) { return PALETTE_STATUS_OK; } -palette_status_t PaletteNotifyStartDex2oatCompilation(int source_fd ATTRIBUTE_UNUSED, - int art_fd ATTRIBUTE_UNUSED, - int oat_fd ATTRIBUTE_UNUSED, - int vdex_fd ATTRIBUTE_UNUSED) { +palette_status_t PaletteNotifyStartDex2oatCompilation([[maybe_unused]] int source_fd, + [[maybe_unused]] int art_fd, + [[maybe_unused]] int oat_fd, + [[maybe_unused]] int vdex_fd) { return PALETTE_STATUS_OK; } -palette_status_t PaletteNotifyEndDex2oatCompilation(int source_fd ATTRIBUTE_UNUSED, - int art_fd ATTRIBUTE_UNUSED, - int oat_fd ATTRIBUTE_UNUSED, - int vdex_fd ATTRIBUTE_UNUSED) { +palette_status_t PaletteNotifyEndDex2oatCompilation([[maybe_unused]] int source_fd, + [[maybe_unused]] int art_fd, + [[maybe_unused]] int oat_fd, + [[maybe_unused]] int vdex_fd) { return PALETTE_STATUS_OK; } -palette_status_t PaletteNotifyDexFileLoaded(const char* path ATTRIBUTE_UNUSED) { +palette_status_t PaletteNotifyDexFileLoaded([[maybe_unused]] const char* path) { return PALETTE_STATUS_OK; } -palette_status_t PaletteNotifyOatFileLoaded(const char* path ATTRIBUTE_UNUSED) { +palette_status_t PaletteNotifyOatFileLoaded([[maybe_unused]] const char* path) { return PALETTE_STATUS_OK; } @@ -123,33 +118,33 @@ palette_status_t PaletteShouldReportJniInvocations(bool* value) { return PALETTE_STATUS_OK; } -palette_status_t PaletteNotifyBeginJniInvocation(JNIEnv* env ATTRIBUTE_UNUSED) { +palette_status_t PaletteNotifyBeginJniInvocation([[maybe_unused]] JNIEnv* env) { return PALETTE_STATUS_OK; } -palette_status_t PaletteNotifyEndJniInvocation(JNIEnv* env ATTRIBUTE_UNUSED) { +palette_status_t PaletteNotifyEndJniInvocation([[maybe_unused]] JNIEnv* env) { return PALETTE_STATUS_OK; } // Methods in version 2 API, corresponding to SDK level 33. 
-palette_status_t PaletteReportLockContention(JNIEnv* env ATTRIBUTE_UNUSED, - int32_t wait_ms ATTRIBUTE_UNUSED, - const char* filename ATTRIBUTE_UNUSED, - int32_t line_number ATTRIBUTE_UNUSED, - const char* method_name ATTRIBUTE_UNUSED, - const char* owner_filename ATTRIBUTE_UNUSED, - int32_t owner_line_number ATTRIBUTE_UNUSED, - const char* owner_method_name ATTRIBUTE_UNUSED, - const char* proc_name ATTRIBUTE_UNUSED, - const char* thread_name ATTRIBUTE_UNUSED) { +palette_status_t PaletteReportLockContention([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] int32_t wait_ms, + [[maybe_unused]] const char* filename, + [[maybe_unused]] int32_t line_number, + [[maybe_unused]] const char* method_name, + [[maybe_unused]] const char* owner_filename, + [[maybe_unused]] int32_t owner_line_number, + [[maybe_unused]] const char* owner_method_name, + [[maybe_unused]] const char* proc_name, + [[maybe_unused]] const char* thread_name) { return PALETTE_STATUS_OK; } // Methods in version 3 API, corresponding to SDK level 34. -palette_status_t PaletteSetTaskProfiles(int32_t tid ATTRIBUTE_UNUSED, - const char* const profiles[] ATTRIBUTE_UNUSED, - size_t profiles_len ATTRIBUTE_UNUSED) { +palette_status_t PaletteSetTaskProfiles([[maybe_unused]] int32_t tid, + [[maybe_unused]] const char* const profiles[], + [[maybe_unused]] size_t profiles_len) { return PALETTE_STATUS_OK; } diff --git a/libdexfile/dex/code_item_accessors-inl.h b/libdexfile/dex/code_item_accessors-inl.h index 1e33002b4a..b74046071f 100644 --- a/libdexfile/dex/code_item_accessors-inl.h +++ b/libdexfile/dex/code_item_accessors-inl.h @@ -178,8 +178,7 @@ inline void CodeItemDebugInfoAccessor::Init<CompactDexFile::CodeItem>( template <> inline void CodeItemDebugInfoAccessor::Init<StandardDexFile::CodeItem>( - const StandardDexFile::CodeItem& code_item, - uint32_t dex_method_index ATTRIBUTE_UNUSED) { + const StandardDexFile::CodeItem& code_item, [[maybe_unused]] uint32_t dex_method_index) { debug_info_offset_ = code_item.debug_info_off_; CodeItemDataAccessor::Init(code_item); } diff --git a/libdexfile/dex/dex_file_verifier.cc b/libdexfile/dex/dex_file_verifier.cc index 42eff2ce2a..e16f72bb38 100644 --- a/libdexfile/dex/dex_file_verifier.cc +++ b/libdexfile/dex/dex_file_verifier.cc @@ -50,9 +50,7 @@ constexpr bool IsValidOrNoTypeId(uint16_t low, uint16_t high) { return (high == 0) || ((high == 0xffffU) && (low == 0xffffU)); } -constexpr bool IsValidTypeId(uint16_t low ATTRIBUTE_UNUSED, uint16_t high) { - return (high == 0); -} +constexpr bool IsValidTypeId([[maybe_unused]] uint16_t low, uint16_t high) { return (high == 0); } constexpr uint32_t MapTypeToBitMask(DexFile::MapItemType map_item_type) { switch (map_item_type) { diff --git a/libdexfile/dex/dex_file_verifier_test.cc b/libdexfile/dex/dex_file_verifier_test.cc index d31635ef05..a2f2e93f2f 100644 --- a/libdexfile/dex/dex_file_verifier_test.cc +++ b/libdexfile/dex/dex_file_verifier_test.cc @@ -1454,7 +1454,7 @@ TEST_F(DexFileVerifierTest, ClassExtendsItself) { VerifyModification( kClassExtendsItselfTestDex, "class_extends_itself", - [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ }, + []([[maybe_unused]] DexFile* dex_file) { /* empty */ }, "Class with same type idx as its superclass: '0'"); } @@ -1479,7 +1479,7 @@ TEST_F(DexFileVerifierTest, ClassesExtendOneAnother) { VerifyModification( kClassesExtendOneAnotherTestDex, "classes_extend_one_another", - [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ }, + []([[maybe_unused]] DexFile* dex_file) { /* empty */ }, "Invalid class definition 
ordering: class with type idx: '1' defined before" " superclass with type idx: '0'"); } @@ -1511,7 +1511,7 @@ TEST_F(DexFileVerifierTest, CircularClassInheritance) { VerifyModification( kCircularClassInheritanceTestDex, "circular_class_inheritance", - [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ }, + []([[maybe_unused]] DexFile* dex_file) { /* empty */ }, "Invalid class definition ordering: class with type idx: '1' defined before" " superclass with type idx: '0'"); } @@ -1534,7 +1534,7 @@ TEST_F(DexFileVerifierTest, InterfaceImplementsItself) { VerifyModification( kInterfaceImplementsItselfTestDex, "interface_implements_itself", - [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ }, + []([[maybe_unused]] DexFile* dex_file) { /* empty */ }, "Class with same type idx as implemented interface: '0'"); } @@ -1562,7 +1562,7 @@ TEST_F(DexFileVerifierTest, InterfacesImplementOneAnother) { VerifyModification( kInterfacesImplementOneAnotherTestDex, "interfaces_implement_one_another", - [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ }, + []([[maybe_unused]] DexFile* dex_file) { /* empty */ }, "Invalid class definition ordering: class with type idx: '1' defined before" " implemented interface with type idx: '0'"); } @@ -1598,7 +1598,7 @@ TEST_F(DexFileVerifierTest, CircularInterfaceImplementation) { VerifyModification( kCircularInterfaceImplementationTestDex, "circular_interface_implementation", - [](DexFile* dex_file ATTRIBUTE_UNUSED) { /* empty */ }, + []([[maybe_unused]] DexFile* dex_file) { /* empty */ }, "Invalid class definition ordering: class with type idx: '2' defined before" " implemented interface with type idx: '0'"); } diff --git a/libdexfile/dex/utf.cc b/libdexfile/dex/utf.cc index 9692a26827..bcda8ca5bf 100644 --- a/libdexfile/dex/utf.cc +++ b/libdexfile/dex/utf.cc @@ -209,7 +209,7 @@ int CompareModifiedUtf8ToUtf16AsCodePointValues(const char* utf8, const uint16_t size_t CountModifiedUtf8BytesInUtf16(const uint16_t* chars, size_t char_count) { // FIXME: We should not emit 4-byte sequences. Bug: 192935764 size_t result = 0; - auto append = [&](char c ATTRIBUTE_UNUSED) { ++result; }; + auto append = [&]([[maybe_unused]] char c) { ++result; }; ConvertUtf16ToUtf8</*kUseShortZero=*/ false, /*kUse4ByteSequence=*/ true, /*kReplaceBadSurrogates=*/ false>(chars, char_count, append); diff --git a/libelffile/elf/elf_utils.h b/libelffile/elf/elf_utils.h index 46b25b0a07..e101920ef8 100644 --- a/libelffile/elf/elf_utils.h +++ b/libelffile/elf/elf_utils.h @@ -96,7 +96,7 @@ static inline void SetBindingAndType(Elf32_Sym* sym, unsigned char b, unsigned c } static inline bool IsDynamicSectionPointer(Elf32_Word d_tag, - Elf32_Word e_machine ATTRIBUTE_UNUSED) { + [[maybe_unused]] Elf32_Word e_machine) { // TODO: Remove the `e_machine` parameter from API (not needed after Mips target was removed). 
switch (d_tag) { // case 1: well known d_tag values that imply Elf32_Dyn.d_un contains an address in d_ptr diff --git a/libprofile/profile/profile_compilation_info.cc b/libprofile/profile/profile_compilation_info.cc index 30b0a1ca91..af007d1962 100644 --- a/libprofile/profile/profile_compilation_info.cc +++ b/libprofile/profile/profile_compilation_info.cc @@ -2507,8 +2507,7 @@ bool ProfileCompilationInfo::UpdateProfileKeys( } bool ProfileCompilationInfo::ProfileFilterFnAcceptAll( - const std::string& dex_location ATTRIBUTE_UNUSED, - uint32_t checksum ATTRIBUTE_UNUSED) { + [[maybe_unused]] const std::string& dex_location, [[maybe_unused]] uint32_t checksum) { return true; } diff --git a/odrefresh/odr_fs_utils.cc b/odrefresh/odr_fs_utils.cc index 3ed8021037..2a77e52a6d 100644 --- a/odrefresh/odr_fs_utils.cc +++ b/odrefresh/odr_fs_utils.cc @@ -43,7 +43,7 @@ namespace odrefresh { // Callback for use with nftw(3) to assist with clearing files and sub-directories. // This method removes files and directories below the top-level directory passed to nftw(). static int NftwCleanUpCallback(const char* fpath, - const struct stat* sb ATTRIBUTE_UNUSED, + [[maybe_unused]] const struct stat* sb, int typeflag, struct FTW* ftwbuf) { switch (typeflag) { diff --git a/odrefresh/odrefresh_broken.cc b/odrefresh/odrefresh_broken.cc index 7b7d095df7..6657a10f8a 100644 --- a/odrefresh/odrefresh_broken.cc +++ b/odrefresh/odrefresh_broken.cc @@ -17,8 +17,7 @@ #include <android-base/macros.h> #include <odrefresh/odrefresh.h> - -int main(int argc ATTRIBUTE_UNUSED, char** argv ATTRIBUTE_UNUSED) { +int main([[maybe_unused]] int argc, [[maybe_unused]] char** argv) { // Return a value that will make odsign just cleanup all potential existing /data // artifacts. return art::odrefresh::ExitCode::kCleanupFailed; diff --git a/openjdkjvm/OpenjdkJvm.cc b/openjdkjvm/OpenjdkJvm.cc index da6fc85f54..c0f874f613 100644 --- a/openjdkjvm/OpenjdkJvm.cc +++ b/openjdkjvm/OpenjdkJvm.cc @@ -204,8 +204,7 @@ JNIEXPORT void* JVM_FindLibraryEntry(void* handle, const char* name) { return dlsym(handle, name); } -JNIEXPORT jlong JVM_CurrentTimeMillis(JNIEnv* env ATTRIBUTE_UNUSED, - jclass clazz ATTRIBUTE_UNUSED) { +JNIEXPORT jlong JVM_CurrentTimeMillis([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass clazz) { struct timeval tv; gettimeofday(&tv, (struct timezone *) nullptr); jlong when = tv.tv_sec * 1000LL + tv.tv_usec / 1000; @@ -216,9 +215,9 @@ JNIEXPORT jlong JVM_CurrentTimeMillis(JNIEnv* env ATTRIBUTE_UNUSED, * See the spec of this function in jdk.internal.misc.VM. * @return -1 if the system time isn't within +/- 2^32 seconds from offset_secs */ -JNIEXPORT jlong JVM_GetNanoTimeAdjustment(JNIEnv *ATTRIBUTE_UNUSED, - jclass ATTRIBUTE_UNUSED, - jlong offset_secs) { +JNIEXPORT jlong JVM_GetNanoTimeAdjustment([[maybe_unused]] JNIEnv*, + [[maybe_unused]] jclass, + jlong offset_secs) { struct timeval tv; // Note that we don't want the elapsed time here, but the system clock. // gettimeofday() doesn't provide nanosecond-level precision. 
@@ -388,19 +387,21 @@ JNIEXPORT void JVM_SetThreadPriority(JNIEnv* env, jobject jthread, jint prio) { } } -JNIEXPORT void JVM_Yield(JNIEnv* env ATTRIBUTE_UNUSED, jclass threadClass ATTRIBUTE_UNUSED) { +JNIEXPORT void JVM_Yield([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass threadClass) { sched_yield(); } -JNIEXPORT void JVM_Sleep(JNIEnv* env, jclass threadClass ATTRIBUTE_UNUSED, - jobject java_lock, jlong millis) { +JNIEXPORT void JVM_Sleep(JNIEnv* env, + [[maybe_unused]] jclass threadClass, + jobject java_lock, + jlong millis) { art::ScopedFastNativeObjectAccess soa(env); art::ObjPtr<art::mirror::Object> lock = soa.Decode<art::mirror::Object>(java_lock); art::Monitor::Wait( art::Thread::Current(), lock.Ptr(), millis, 0, true, art::ThreadState::kSleeping); } -JNIEXPORT jobject JVM_CurrentThread(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED) { +JNIEXPORT jobject JVM_CurrentThread(JNIEnv* env, [[maybe_unused]] jclass unused) { art::ScopedFastNativeObjectAccess soa(env); return soa.AddLocalReference<jobject>(soa.Self()->GetPeer()); } @@ -425,7 +426,7 @@ JNIEXPORT jboolean JVM_IsInterrupted(JNIEnv* env, jobject jthread, jboolean clea } } -JNIEXPORT jboolean JVM_HoldsLock(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED, jobject jobj) { +JNIEXPORT jboolean JVM_HoldsLock(JNIEnv* env, [[maybe_unused]] jclass unused, jobject jobj) { art::ScopedObjectAccess soa(env); art::ObjPtr<art::mirror::Object> object = soa.Decode<art::mirror::Object>(jobj); if (object == nullptr) { @@ -436,20 +437,21 @@ JNIEXPORT jboolean JVM_HoldsLock(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED, jo } JNIEXPORT __attribute__((noreturn)) void JVM_SetNativeThreadName( - JNIEnv* env ATTRIBUTE_UNUSED, - jobject jthread ATTRIBUTE_UNUSED, - jstring java_name ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jobject jthread, + [[maybe_unused]] jstring java_name) { UNIMPLEMENTED(FATAL) << "JVM_SetNativeThreadName is not implemented"; UNREACHABLE(); } -JNIEXPORT __attribute__((noreturn)) jint JVM_IHashCode(JNIEnv* env ATTRIBUTE_UNUSED, - jobject javaObject ATTRIBUTE_UNUSED) { +JNIEXPORT __attribute__((noreturn)) jint JVM_IHashCode([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jobject javaObject) { UNIMPLEMENTED(FATAL) << "JVM_IHashCode is not implemented"; UNREACHABLE(); } -JNIEXPORT __attribute__((noreturn)) jlong JVM_NanoTime(JNIEnv* env ATTRIBUTE_UNUSED, jclass unused ATTRIBUTE_UNUSED) { +JNIEXPORT __attribute__((noreturn)) jlong JVM_NanoTime([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass unused) { UNIMPLEMENTED(FATAL) << "JVM_NanoTime is not implemented"; UNREACHABLE(); } @@ -461,17 +463,18 @@ JNIEXPORT __attribute__((noreturn)) void JVM_ArrayCopy(JNIEnv* /* env */, jclass UNREACHABLE(); } -JNIEXPORT __attribute__((noreturn)) jint JVM_FindSignal(const char* name ATTRIBUTE_UNUSED) { +JNIEXPORT __attribute__((noreturn)) jint JVM_FindSignal([[maybe_unused]] const char* name) { LOG(FATAL) << "JVM_FindSignal is not implemented"; UNREACHABLE(); } -JNIEXPORT __attribute__((noreturn)) void* JVM_RegisterSignal(jint signum ATTRIBUTE_UNUSED, void* handler ATTRIBUTE_UNUSED) { +JNIEXPORT __attribute__((noreturn)) void* JVM_RegisterSignal([[maybe_unused]] jint signum, + [[maybe_unused]] void* handler) { LOG(FATAL) << "JVM_RegisterSignal is not implemented"; UNREACHABLE(); } -JNIEXPORT __attribute__((noreturn)) jboolean JVM_RaiseSignal(jint signum ATTRIBUTE_UNUSED) { +JNIEXPORT __attribute__((noreturn)) jboolean JVM_RaiseSignal([[maybe_unused]] jint signum) { LOG(FATAL) << "JVM_RaiseSignal is not implemented"; 
UNREACHABLE(); } diff --git a/openjdkjvmti/OpenjdkJvmTi.cc b/openjdkjvmti/OpenjdkJvmTi.cc index 276b3a813d..1e76368618 100644 --- a/openjdkjvmti/OpenjdkJvmTi.cc +++ b/openjdkjvmti/OpenjdkJvmTi.cc @@ -472,9 +472,9 @@ class JvmtiFunctions { static jvmtiError IterateOverObjectsReachableFromObject( jvmtiEnv* env, - jobject object ATTRIBUTE_UNUSED, - jvmtiObjectReferenceCallback object_reference_callback ATTRIBUTE_UNUSED, - const void* user_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jobject object, + [[maybe_unused]] jvmtiObjectReferenceCallback object_reference_callback, + [[maybe_unused]] const void* user_data) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_tag_objects); return ERR(NOT_IMPLEMENTED); @@ -482,19 +482,19 @@ class JvmtiFunctions { static jvmtiError IterateOverReachableObjects( jvmtiEnv* env, - jvmtiHeapRootCallback heap_root_callback ATTRIBUTE_UNUSED, - jvmtiStackReferenceCallback stack_ref_callback ATTRIBUTE_UNUSED, - jvmtiObjectReferenceCallback object_ref_callback ATTRIBUTE_UNUSED, - const void* user_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiHeapRootCallback heap_root_callback, + [[maybe_unused]] jvmtiStackReferenceCallback stack_ref_callback, + [[maybe_unused]] jvmtiObjectReferenceCallback object_ref_callback, + [[maybe_unused]] const void* user_data) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_tag_objects); return ERR(NOT_IMPLEMENTED); } static jvmtiError IterateOverHeap(jvmtiEnv* env, - jvmtiHeapObjectFilter object_filter ATTRIBUTE_UNUSED, - jvmtiHeapObjectCallback heap_object_callback ATTRIBUTE_UNUSED, - const void* user_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiHeapObjectFilter object_filter, + [[maybe_unused]] jvmtiHeapObjectCallback heap_object_callback, + [[maybe_unused]] const void* user_data) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_tag_objects); return ERR(NOT_IMPLEMENTED); @@ -730,10 +730,10 @@ class JvmtiFunctions { } static jvmtiError GetConstantPool(jvmtiEnv* env, - jclass klass ATTRIBUTE_UNUSED, - jint* constant_pool_count_ptr ATTRIBUTE_UNUSED, - jint* constant_pool_byte_count_ptr ATTRIBUTE_UNUSED, - unsigned char** constant_pool_bytes_ptr ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass klass, + [[maybe_unused]] jint* constant_pool_count_ptr, + [[maybe_unused]] jint* constant_pool_byte_count_ptr, + [[maybe_unused]] unsigned char** constant_pool_bytes_ptr) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_get_constant_pool); return ERR(NOT_IMPLEMENTED); @@ -926,15 +926,15 @@ class JvmtiFunctions { return MethodUtil::IsMethodObsolete(env, method, is_obsolete_ptr); } - static jvmtiError SetNativeMethodPrefix(jvmtiEnv* env, const char* prefix ATTRIBUTE_UNUSED) { + static jvmtiError SetNativeMethodPrefix(jvmtiEnv* env, [[maybe_unused]] const char* prefix) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_set_native_method_prefix); return ERR(NOT_IMPLEMENTED); } static jvmtiError SetNativeMethodPrefixes(jvmtiEnv* env, - jint prefix_count ATTRIBUTE_UNUSED, - char** prefixes ATTRIBUTE_UNUSED) { + [[maybe_unused]] jint prefix_count, + [[maybe_unused]] char** prefixes) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_set_native_method_prefix); return ERR(NOT_IMPLEMENTED); @@ -1032,8 +1032,7 @@ class JvmtiFunctions { mode); } - static jvmtiError GenerateEvents(jvmtiEnv* env, - jvmtiEvent event_type ATTRIBUTE_UNUSED) { + static jvmtiError GenerateEvents(jvmtiEnv* env, [[maybe_unused]] jvmtiEvent event_type) { ENSURE_VALID_ENV(env); return OK; } @@ -1195,28 +1194,28 @@ class JvmtiFunctions { } static jvmtiError 
GetCurrentThreadCpuTimerInfo(jvmtiEnv* env, - jvmtiTimerInfo* info_ptr ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiTimerInfo* info_ptr) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_get_current_thread_cpu_time); return ERR(NOT_IMPLEMENTED); } - static jvmtiError GetCurrentThreadCpuTime(jvmtiEnv* env, jlong* nanos_ptr ATTRIBUTE_UNUSED) { + static jvmtiError GetCurrentThreadCpuTime(jvmtiEnv* env, [[maybe_unused]] jlong* nanos_ptr) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_get_current_thread_cpu_time); return ERR(NOT_IMPLEMENTED); } static jvmtiError GetThreadCpuTimerInfo(jvmtiEnv* env, - jvmtiTimerInfo* info_ptr ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiTimerInfo* info_ptr) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_get_thread_cpu_time); return ERR(NOT_IMPLEMENTED); } static jvmtiError GetThreadCpuTime(jvmtiEnv* env, - jthread thread ATTRIBUTE_UNUSED, - jlong* nanos_ptr ATTRIBUTE_UNUSED) { + [[maybe_unused]] jthread thread, + [[maybe_unused]] jlong* nanos_ptr) { ENSURE_VALID_ENV(env); ENSURE_HAS_CAP(env, can_get_thread_cpu_time); return ERR(NOT_IMPLEMENTED); diff --git a/openjdkjvmti/alloc_manager.cc b/openjdkjvmti/alloc_manager.cc index 5910073d79..b20e098546 100644 --- a/openjdkjvmti/alloc_manager.cc +++ b/openjdkjvmti/alloc_manager.cc @@ -198,9 +198,8 @@ void AllocationManager::PauseAllocations(art::Thread* self) { // Force every thread to either be suspended or pass through a barrier. art::ScopedThreadSuspension sts(self, art::ThreadState::kSuspended); art::Barrier barrier(0); - art::FunctionClosure fc([&](art::Thread* thr ATTRIBUTE_UNUSED) { - barrier.Pass(art::Thread::Current()); - }); + art::FunctionClosure fc( + [&]([[maybe_unused]] art::Thread* thr) { barrier.Pass(art::Thread::Current()); }); size_t requested = art::Runtime::Current()->GetThreadList()->RunCheckpoint(&fc); barrier.Increment(self, requested); } diff --git a/openjdkjvmti/events-inl.h b/openjdkjvmti/events-inl.h index 92cfbc9c41..1e309b4938 100644 --- a/openjdkjvmti/events-inl.h +++ b/openjdkjvmti/events-inl.h @@ -360,8 +360,9 @@ template <> inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kBreakpoint>( ArtJvmTiEnv* env, art::Thread* thread, - JNIEnv* jnienv ATTRIBUTE_UNUSED, - jthread jni_thread ATTRIBUTE_UNUSED, + + [[maybe_unused]] JNIEnv* jnienv, + [[maybe_unused]] jthread jni_thread, jmethodID jmethod, jlocation location) const { art::ReaderMutexLock lk(art::Thread::Current(), env->event_info_mutex_); @@ -374,10 +375,10 @@ template <> inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kFramePop>( ArtJvmTiEnv* env, art::Thread* thread, - JNIEnv* jnienv ATTRIBUTE_UNUSED, - jthread jni_thread ATTRIBUTE_UNUSED, - jmethodID jmethod ATTRIBUTE_UNUSED, - jboolean is_exception ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jnienv, + [[maybe_unused]] jthread jni_thread, + [[maybe_unused]] jmethodID jmethod, + [[maybe_unused]] jboolean is_exception, const art::ShadowFrame* frame) const { // Search for the frame. Do this before checking if we need to send the event so that we don't // have to deal with use-after-free or the frames being reallocated later. 
@@ -395,15 +396,15 @@ template <> inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kFieldModification>( ArtJvmTiEnv* env, art::Thread* thread, - JNIEnv* jnienv ATTRIBUTE_UNUSED, - jthread jni_thread ATTRIBUTE_UNUSED, - jmethodID method ATTRIBUTE_UNUSED, - jlocation location ATTRIBUTE_UNUSED, - jclass field_klass ATTRIBUTE_UNUSED, - jobject object ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jnienv, + [[maybe_unused]] jthread jni_thread, + [[maybe_unused]] jmethodID method, + [[maybe_unused]] jlocation location, + [[maybe_unused]] jclass field_klass, + [[maybe_unused]] jobject object, jfieldID field, - char type_char ATTRIBUTE_UNUSED, - jvalue val ATTRIBUTE_UNUSED) const { + [[maybe_unused]] char type_char, + [[maybe_unused]] jvalue val) const { art::ReaderMutexLock lk(art::Thread::Current(), env->event_info_mutex_); return ShouldDispatchOnThread<ArtJvmtiEvent::kFieldModification>(env, thread) && env->modify_watched_fields.find( @@ -414,12 +415,12 @@ template <> inline bool EventHandler::ShouldDispatch<ArtJvmtiEvent::kFieldAccess>( ArtJvmTiEnv* env, art::Thread* thread, - JNIEnv* jnienv ATTRIBUTE_UNUSED, - jthread jni_thread ATTRIBUTE_UNUSED, - jmethodID method ATTRIBUTE_UNUSED, - jlocation location ATTRIBUTE_UNUSED, - jclass field_klass ATTRIBUTE_UNUSED, - jobject object ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jnienv, + [[maybe_unused]] jthread jni_thread, + [[maybe_unused]] jmethodID method, + [[maybe_unused]] jlocation location, + [[maybe_unused]] jclass field_klass, + [[maybe_unused]] jobject object, jfieldID field) const { art::ReaderMutexLock lk(art::Thread::Current(), env->event_info_mutex_); return ShouldDispatchOnThread<ArtJvmtiEvent::kFieldAccess>(env, thread) && @@ -439,7 +440,7 @@ inline void EventHandler::ExecuteCallback<ArtJvmtiEvent::kFramePop>( jthread jni_thread, jmethodID jmethod, jboolean is_exception, - const art::ShadowFrame* frame ATTRIBUTE_UNUSED) { + [[maybe_unused]] const art::ShadowFrame* frame) { ExecuteCallback<ArtJvmtiEvent::kFramePop>(event, jnienv, jni_thread, jmethod, is_exception); } @@ -628,10 +629,10 @@ inline bool EventHandler::ShouldDispatchOnThread(ArtJvmTiEnv* env, art::Thread* return dispatch; } -template <ArtJvmtiEvent kEvent, typename ...Args> +template <ArtJvmtiEvent kEvent, typename... Args> inline bool EventHandler::ShouldDispatch(ArtJvmTiEnv* env, art::Thread* thread, - Args... args ATTRIBUTE_UNUSED) const { + [[maybe_unused]] Args... args) const { static_assert(std::is_same<typename impl::EventFnType<kEvent>::type, void(*)(jvmtiEnv*, Args...)>::value, "Unexpected different type of shouldDispatch"); diff --git a/openjdkjvmti/events.cc b/openjdkjvmti/events.cc index 64da6ed3f6..31107d08a5 100644 --- a/openjdkjvmti/events.cc +++ b/openjdkjvmti/events.cc @@ -737,9 +737,7 @@ class JvmtiMethodTraceListener final : public art::instrumentation::Instrumentat // Call-back for when a method is popped due to an exception throw. A method will either cause a // MethodExited call-back or a MethodUnwind call-back when its activation is removed. - void MethodUnwind(art::Thread* self, - art::ArtMethod* method, - uint32_t dex_pc ATTRIBUTE_UNUSED) + void MethodUnwind(art::Thread* self, art::ArtMethod* method, [[maybe_unused]] uint32_t dex_pc) REQUIRES_SHARED(art::Locks::mutator_lock_) override { if (!method->IsRuntimeMethod() && event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) { @@ -767,10 +765,9 @@ class JvmtiMethodTraceListener final : public art::instrumentation::Instrumentat // Call-back for when the dex pc moves in a method. 
void DexPcMoved(art::Thread* self, - art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED, + [[maybe_unused]] art::Handle<art::mirror::Object> this_object, art::ArtMethod* method, - uint32_t new_dex_pc) - REQUIRES_SHARED(art::Locks::mutator_lock_) override { + uint32_t new_dex_pc) REQUIRES_SHARED(art::Locks::mutator_lock_) override { DCHECK(!method->IsRuntimeMethod()); // Default methods might be copied to multiple classes. We need to get the canonical version of // this method so that we can check for breakpoints correctly. @@ -1034,10 +1031,10 @@ class JvmtiMethodTraceListener final : public art::instrumentation::Instrumentat } // Call-back for when we execute a branch. - void Branch(art::Thread* self ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - int32_t dex_pc_offset ATTRIBUTE_UNUSED) + void Branch([[maybe_unused]] art::Thread* self, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] int32_t dex_pc_offset) REQUIRES_SHARED(art::Locks::mutator_lock_) override { return; } diff --git a/openjdkjvmti/jvmti_allocator.h b/openjdkjvmti/jvmti_allocator.h index 4adf769f12..618a661c5a 100644 --- a/openjdkjvmti/jvmti_allocator.h +++ b/openjdkjvmti/jvmti_allocator.h @@ -110,7 +110,7 @@ class JvmtiAllocator { pointer address(reference x) const { return &x; } const_pointer address(const_reference x) const { return &x; } - pointer allocate(size_type n, JvmtiAllocator<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) { + pointer allocate(size_type n, [[maybe_unused]] JvmtiAllocator<void>::pointer hint = nullptr) { DCHECK_LE(n, max_size()); if (env_ == nullptr) { T* result = reinterpret_cast<T*>(AllocUtil::AllocateImpl(n * sizeof(T))); @@ -123,7 +123,7 @@ class JvmtiAllocator { return reinterpret_cast<T*>(result); } } - void deallocate(pointer p, size_type n ATTRIBUTE_UNUSED) { + void deallocate(pointer p, [[maybe_unused]] size_type n) { if (env_ == nullptr) { AllocUtil::DeallocateImpl(reinterpret_cast<unsigned char*>(p)); } else { diff --git a/openjdkjvmti/jvmti_weak_table-inl.h b/openjdkjvmti/jvmti_weak_table-inl.h index c5663e5475..7502ad272a 100644 --- a/openjdkjvmti/jvmti_weak_table-inl.h +++ b/openjdkjvmti/jvmti_weak_table-inl.h @@ -68,10 +68,10 @@ void JvmtiWeakTable<T>::UpdateTableWithReadBarrier() { update_since_last_sweep_ = true; auto WithReadBarrierUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root, - art::mirror::Object* original_obj ATTRIBUTE_UNUSED) - REQUIRES_SHARED(art::Locks::mutator_lock_) { - return original_root.Read<art::kWithReadBarrier>(); - }; + [[maybe_unused]] art::mirror::Object* original_obj) + REQUIRES_SHARED(art::Locks::mutator_lock_) { + return original_root.Read<art::kWithReadBarrier>(); + }; UpdateTableWith<decltype(WithReadBarrierUpdater), kIgnoreNull>(WithReadBarrierUpdater); } @@ -198,7 +198,7 @@ void JvmtiWeakTable<T>::SweepImpl(art::IsMarkedVisitor* visitor) { art::Thread* self = art::Thread::Current(); art::MutexLock mu(self, allow_disallow_lock_); - auto IsMarkedUpdater = [&](const art::GcRoot<art::mirror::Object>& original_root ATTRIBUTE_UNUSED, + auto IsMarkedUpdater = [&]([[maybe_unused]] const art::GcRoot<art::mirror::Object>& original_root, art::mirror::Object* original_obj) { return visitor->IsMarked(original_obj); }; diff --git a/openjdkjvmti/jvmti_weak_table.h b/openjdkjvmti/jvmti_weak_table.h index 674b2a3d52..8f8d89b818 100644 --- a/openjdkjvmti/jvmti_weak_table.h +++ b/openjdkjvmti/jvmti_weak_table.h @@ -128,7 +128,7 @@ class 
JvmtiWeakTable : public art::gc::SystemWeakHolder { return false; } // If DoesHandleNullOnSweep returns true, this function will be called. - virtual void HandleNullSweep(T tag ATTRIBUTE_UNUSED) {} + virtual void HandleNullSweep([[maybe_unused]] T tag) {} private: ALWAYS_INLINE diff --git a/openjdkjvmti/ti_allocator.cc b/openjdkjvmti/ti_allocator.cc index 575558dccf..1e6d4627b5 100644 --- a/openjdkjvmti/ti_allocator.cc +++ b/openjdkjvmti/ti_allocator.cc @@ -47,7 +47,7 @@ namespace openjdkjvmti { std::atomic<jlong> AllocUtil::allocated; -jvmtiError AllocUtil::GetGlobalJvmtiAllocationState(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError AllocUtil::GetGlobalJvmtiAllocationState([[maybe_unused]] jvmtiEnv* env, jlong* allocated_ptr) { if (allocated_ptr == nullptr) { return ERR(NULL_POINTER); @@ -56,7 +56,7 @@ jvmtiError AllocUtil::GetGlobalJvmtiAllocationState(jvmtiEnv* env ATTRIBUTE_UNUS return OK; } -jvmtiError AllocUtil::Allocate(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError AllocUtil::Allocate([[maybe_unused]] jvmtiEnv* env, jlong size, unsigned char** mem_ptr) { if (size < 0) { @@ -80,7 +80,7 @@ unsigned char* AllocUtil::AllocateImpl(jlong size) { return ret; } -jvmtiError AllocUtil::Deallocate(jvmtiEnv* env ATTRIBUTE_UNUSED, unsigned char* mem) { +jvmtiError AllocUtil::Deallocate([[maybe_unused]] jvmtiEnv* env, unsigned char* mem) { DeallocateImpl(mem); return OK; } diff --git a/openjdkjvmti/ti_class.cc b/openjdkjvmti/ti_class.cc index 3d44516173..7ded350c53 100644 --- a/openjdkjvmti/ti_class.cc +++ b/openjdkjvmti/ti_class.cc @@ -162,10 +162,10 @@ struct ClassCallback : public art::ClassLoadCallback { art::Handle<art::mirror::Class> klass, art::Handle<art::mirror::ClassLoader> class_loader, const art::DexFile& initial_dex_file, - const art::dex::ClassDef& initial_class_def ATTRIBUTE_UNUSED, - /*out*/art::DexFile const** final_dex_file, - /*out*/art::dex::ClassDef const** final_class_def) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { + [[maybe_unused]] const art::dex::ClassDef& initial_class_def, + /*out*/ art::DexFile const** final_dex_file, + /*out*/ art::dex::ClassDef const** final_class_def) override + REQUIRES_SHARED(art::Locks::mutator_lock_) { bool is_enabled = event_handler->IsEventEnabledAnywhere(ArtJvmtiEvent::kClassFileLoadHookRetransformable) || event_handler->IsEventEnabledAnywhere(ArtJvmtiEvent::kClassFileLoadHookNonRetransformable); @@ -387,8 +387,7 @@ struct ClassCallback : public art::ClassLoadCallback { void VisitRoots(art::mirror::Object*** roots, size_t count, - const art::RootInfo& info ATTRIBUTE_UNUSED) - override { + [[maybe_unused]] const art::RootInfo& info) override { for (size_t i = 0; i != count; ++i) { if (*roots[i] == input_) { *roots[i] = output_; @@ -398,8 +397,8 @@ struct ClassCallback : public art::ClassLoadCallback { void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots, size_t count, - const art::RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { + [[maybe_unused]] const art::RootInfo& info) override + REQUIRES_SHARED(art::Locks::mutator_lock_) { for (size_t i = 0; i != count; ++i) { if (roots[i]->AsMirrorPtr() == input_) { roots[i]->Assign(output_); @@ -476,7 +475,7 @@ struct ClassCallback : public art::ClassLoadCallback { void operator()(art::mirror::Object* src, art::MemberOffset field_offset, - bool is_static ATTRIBUTE_UNUSED) const + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(art::Locks::mutator_lock_) { art::mirror::HeapReference<art::mirror::Object>* trg = 
src->GetFieldObjectReferenceAddr(field_offset); @@ -487,7 +486,7 @@ struct ClassCallback : public art::ClassLoadCallback { } } - void operator()(art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED, + void operator()([[maybe_unused]] art::ObjPtr<art::mirror::Class> klass, art::ObjPtr<art::mirror::Reference> reference) const REQUIRES_SHARED(art::Locks::mutator_lock_) { art::mirror::Object* val = reference->GetReferent(); @@ -496,13 +495,13 @@ struct ClassCallback : public art::ClassLoadCallback { } } - void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) - const { + void VisitRoot( + [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const { LOG(FATAL) << "Unreachable"; } void VisitRootIfNonNull( - art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const { + [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const { LOG(FATAL) << "Unreachable"; } @@ -623,7 +622,7 @@ jvmtiError ClassUtil::GetClassMethods(jvmtiEnv* env, if (art::kIsDebugBuild) { size_t count = 0; - for (auto& m ATTRIBUTE_UNUSED : klass->GetDeclaredMethods(art::kRuntimePointerSize)) { + for ([[maybe_unused]] auto& m : klass->GetDeclaredMethods(art::kRuntimePointerSize)) { count++; } CHECK_EQ(count, klass->NumDirectMethods() + klass->NumDeclaredVirtualMethods()); @@ -747,7 +746,7 @@ jvmtiError ClassUtil::GetClassSignature(jvmtiEnv* env, return ERR(NONE); } -jvmtiError ClassUtil::GetClassStatus(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ClassUtil::GetClassStatus([[maybe_unused]] jvmtiEnv* env, jclass jklass, jint* status_ptr) { art::ScopedObjectAccess soa(art::Thread::Current()); @@ -798,7 +797,7 @@ static jvmtiError ClassIsT(jclass jklass, T test, jboolean* is_t_ptr) { return ERR(NONE); } -jvmtiError ClassUtil::IsInterface(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ClassUtil::IsInterface([[maybe_unused]] jvmtiEnv* env, jclass jklass, jboolean* is_interface_ptr) { auto test = [](art::ObjPtr<art::mirror::Class> klass) REQUIRES_SHARED(art::Locks::mutator_lock_) { @@ -807,7 +806,7 @@ jvmtiError ClassUtil::IsInterface(jvmtiEnv* env ATTRIBUTE_UNUSED, return ClassIsT(jklass, test, is_interface_ptr); } -jvmtiError ClassUtil::IsArrayClass(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ClassUtil::IsArrayClass([[maybe_unused]] jvmtiEnv* env, jclass jklass, jboolean* is_array_class_ptr) { auto test = [](art::ObjPtr<art::mirror::Class> klass) REQUIRES_SHARED(art::Locks::mutator_lock_) { @@ -834,7 +833,7 @@ static uint32_t ClassGetModifiers(art::Thread* self, art::ObjPtr<art::mirror::Cl return art::mirror::Class::GetInnerClassFlags(h_klass, modifiers); } -jvmtiError ClassUtil::GetClassModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ClassUtil::GetClassModifiers([[maybe_unused]] jvmtiEnv* env, jclass jklass, jint* modifiers_ptr) { art::ScopedObjectAccess soa(art::Thread::Current()); @@ -852,7 +851,7 @@ jvmtiError ClassUtil::GetClassModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError ClassUtil::GetClassLoader(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ClassUtil::GetClassLoader([[maybe_unused]] jvmtiEnv* env, jclass jklass, jobject* classloader_ptr) { art::ScopedObjectAccess soa(art::Thread::Current()); @@ -1047,7 +1046,7 @@ jvmtiError ClassUtil::GetClassLoaderClasses(jvmtiEnv* env, return ERR(NONE); } -jvmtiError ClassUtil::GetClassVersionNumbers(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ClassUtil::GetClassVersionNumbers([[maybe_unused]] jvmtiEnv* env, jclass jklass, jint* minor_version_ptr, 
jint* major_version_ptr) { diff --git a/openjdkjvmti/ti_field.cc b/openjdkjvmti/ti_field.cc index d4c0ec830b..4e39e22649 100644 --- a/openjdkjvmti/ti_field.cc +++ b/openjdkjvmti/ti_field.cc @@ -200,7 +200,7 @@ jvmtiError FieldUtil::GetFieldName(jvmtiEnv* env, return ERR(NONE); } -jvmtiError FieldUtil::GetFieldDeclaringClass(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError FieldUtil::GetFieldDeclaringClass([[maybe_unused]] jvmtiEnv* env, jclass klass, jfieldID field, jclass* declaring_class_ptr) { @@ -223,7 +223,7 @@ jvmtiError FieldUtil::GetFieldDeclaringClass(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError FieldUtil::GetFieldModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError FieldUtil::GetFieldModifiers([[maybe_unused]] jvmtiEnv* env, jclass klass, jfieldID field, jint* modifiers_ptr) { @@ -246,7 +246,7 @@ jvmtiError FieldUtil::GetFieldModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError FieldUtil::IsFieldSynthetic(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError FieldUtil::IsFieldSynthetic([[maybe_unused]] jvmtiEnv* env, jclass klass, jfieldID field, jboolean* is_synthetic_ptr) { diff --git a/openjdkjvmti/ti_heap.cc b/openjdkjvmti/ti_heap.cc index 01864cd312..4be7922e70 100644 --- a/openjdkjvmti/ti_heap.cc +++ b/openjdkjvmti/ti_heap.cc @@ -211,11 +211,11 @@ jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj, } template <typename UserData> -bool VisitorFalse(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED, - art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED, - art::ArtField& field ATTRIBUTE_UNUSED, - size_t field_index ATTRIBUTE_UNUSED, - UserData* user_data ATTRIBUTE_UNUSED) { +bool VisitorFalse([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj, + [[maybe_unused]] art::ObjPtr<art::mirror::Class> klass, + [[maybe_unused]] art::ArtField& field, + [[maybe_unused]] size_t field_index, + [[maybe_unused]] UserData* user_data) { return false; } @@ -476,11 +476,11 @@ class FieldVisitor { // Debug helper. Prints the structure of an object. template <bool kStatic, bool kRef> struct DumpVisitor { - static bool Callback(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED, - art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED, + static bool Callback([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj, + [[maybe_unused]] art::ObjPtr<art::mirror::Class> klass, art::ArtField& field, size_t field_index, - void* user_data ATTRIBUTE_UNUSED) + [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) { LOG(ERROR) << (kStatic ? "static " : "instance ") << (kRef ? 
"ref " : "primitive ") @@ -490,8 +490,7 @@ struct DumpVisitor { return false; } }; -ATTRIBUTE_UNUSED -void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj) +[[maybe_unused]] void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj) REQUIRES_SHARED(art::Locks::mutator_lock_) { if (obj->IsClass()) { FieldVisitor<void, false>:: ReportFields(obj, @@ -825,14 +824,13 @@ jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env, jclass klass, const jvmtiHeapCallbacks* callbacks, const void* user_data) { - auto JvmtiIterateHeap = [](art::mirror::Object* obj ATTRIBUTE_UNUSED, + auto JvmtiIterateHeap = []([[maybe_unused]] art::mirror::Object* obj, const jvmtiHeapCallbacks* cb_callbacks, jlong class_tag, jlong size, jlong* tag, jint length, - void* cb_user_data) - REQUIRES_SHARED(art::Locks::mutator_lock_) { + void* cb_user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) { return cb_callbacks->heap_iteration_callback(class_tag, size, tag, @@ -1108,31 +1106,33 @@ class FollowReferencesHelper final { } // All instance fields. - auto report_instance_field = [&](art::ObjPtr<art::mirror::Object> src, - art::ObjPtr<art::mirror::Class> obj_klass ATTRIBUTE_UNUSED, - art::ArtField& field, - size_t field_index, - void* user_data ATTRIBUTE_UNUSED) - REQUIRES_SHARED(art::Locks::mutator_lock_) - REQUIRES(!*tag_table_->GetAllowDisallowLock()) { - art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src); - if (field_value != nullptr) { - jvmtiHeapReferenceInfo reference_info; - memset(&reference_info, 0, sizeof(reference_info)); - - reference_info.field.index = field_index; - - jvmtiHeapReferenceKind kind = - field.GetOffset().Int32Value() == art::mirror::Object::ClassOffset().Int32Value() - ? JVMTI_HEAP_REFERENCE_CLASS - : JVMTI_HEAP_REFERENCE_FIELD; - const jvmtiHeapReferenceInfo* reference_info_ptr = - kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info; - - return !ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src.Ptr(), field_value.Ptr()); - } - return false; - }; + auto report_instance_field = + [&](art::ObjPtr<art::mirror::Object> src, + [[maybe_unused]] art::ObjPtr<art::mirror::Class> obj_klass, + art::ArtField& field, + size_t field_index, + [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) + REQUIRES(!*tag_table_->GetAllowDisallowLock()) { + art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src); + if (field_value != nullptr) { + jvmtiHeapReferenceInfo reference_info; + memset(&reference_info, 0, sizeof(reference_info)); + + reference_info.field.index = field_index; + + jvmtiHeapReferenceKind kind = + field.GetOffset().Int32Value() == + art::mirror::Object::ClassOffset().Int32Value() ? + JVMTI_HEAP_REFERENCE_CLASS : + JVMTI_HEAP_REFERENCE_FIELD; + const jvmtiHeapReferenceInfo* reference_info_ptr = + kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info; + + return !ReportReferenceMaybeEnqueue( + kind, reference_info_ptr, src.Ptr(), field_value.Ptr()); + } + return false; + }; stop_reports_ = FieldVisitor<void, true>::ReportFields(obj, nullptr, VisitorFalse<void>, @@ -1241,27 +1241,27 @@ class FollowReferencesHelper final { DCHECK_EQ(h_klass.Get(), klass); // Declared static fields. 
- auto report_static_field = [&](art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED, - art::ObjPtr<art::mirror::Class> obj_klass, - art::ArtField& field, - size_t field_index, - void* user_data ATTRIBUTE_UNUSED) - REQUIRES_SHARED(art::Locks::mutator_lock_) - REQUIRES(!*tag_table_->GetAllowDisallowLock()) { - art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass); - if (field_value != nullptr) { - jvmtiHeapReferenceInfo reference_info; - memset(&reference_info, 0, sizeof(reference_info)); - - reference_info.field.index = static_cast<jint>(field_index); - - return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD, - &reference_info, - obj_klass.Ptr(), - field_value.Ptr()); - } - return false; - }; + auto report_static_field = + [&]([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj, + art::ObjPtr<art::mirror::Class> obj_klass, + art::ArtField& field, + size_t field_index, + [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) + REQUIRES(!*tag_table_->GetAllowDisallowLock()) { + art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass); + if (field_value != nullptr) { + jvmtiHeapReferenceInfo reference_info; + memset(&reference_info, 0, sizeof(reference_info)); + + reference_info.field.index = static_cast<jint>(field_index); + + return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD, + &reference_info, + obj_klass.Ptr(), + field_value.Ptr()); + } + return false; + }; stop_reports_ = FieldVisitor<void, false>::ReportFields(klass, nullptr, VisitorFalse<void>, @@ -1473,7 +1473,7 @@ jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env, return ERR(NONE); } -jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) { +jvmtiError HeapUtil::ForceGarbageCollection([[maybe_unused]] jvmtiEnv* env) { art::Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references= */ false); return ERR(NONE); @@ -1666,7 +1666,7 @@ static void ReplaceObjectReferences(const ObjectMap& map) } // java.lang.ref.Reference visitor. - void operator()(art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED, + void operator()([[maybe_unused]] art::ObjPtr<art::mirror::Class> klass, art::ObjPtr<art::mirror::Reference> ref) const REQUIRES_SHARED(art::Locks::mutator_lock_) { operator()(ref, art::mirror::Reference::ReferentOffset(), /* is_static */ false); diff --git a/openjdkjvmti/ti_jni.cc b/openjdkjvmti/ti_jni.cc index b655d6a8e1..98d4ec7290 100644 --- a/openjdkjvmti/ti_jni.cc +++ b/openjdkjvmti/ti_jni.cc @@ -42,7 +42,7 @@ namespace openjdkjvmti { -jvmtiError JNIUtil::SetJNIFunctionTable(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError JNIUtil::SetJNIFunctionTable([[maybe_unused]] jvmtiEnv* env, const jniNativeInterface* function_table) { // While we supporting setting null (which will reset the table), the spec says no. 
if (function_table == nullptr) { diff --git a/openjdkjvmti/ti_logging.cc b/openjdkjvmti/ti_logging.cc index 8740ec65c2..82057b1251 100644 --- a/openjdkjvmti/ti_logging.cc +++ b/openjdkjvmti/ti_logging.cc @@ -100,7 +100,7 @@ jvmtiError LogUtil::SetVerboseFlagExt(jvmtiEnv* env, const char* data, jboolean return OK; } -jvmtiError LogUtil::SetVerboseFlag(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError LogUtil::SetVerboseFlag([[maybe_unused]] jvmtiEnv* env, jvmtiVerboseFlag flag, jboolean value) { if (flag == jvmtiVerboseFlag::JVMTI_VERBOSE_OTHER) { diff --git a/openjdkjvmti/ti_method.cc b/openjdkjvmti/ti_method.cc index 99a5d9c988..dc3f6dcf91 100644 --- a/openjdkjvmti/ti_method.cc +++ b/openjdkjvmti/ti_method.cc @@ -162,7 +162,7 @@ jvmtiError MethodUtil::GetBytecodes(jvmtiEnv* env, return OK; } -jvmtiError MethodUtil::GetArgumentsSize(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MethodUtil::GetArgumentsSize([[maybe_unused]] jvmtiEnv* env, jmethodID method, jint* size_ptr) { if (method == nullptr) { @@ -284,7 +284,7 @@ jvmtiError MethodUtil::GetLocalVariableTable(jvmtiEnv* env, return release(entry_count_ptr, table_ptr); } -jvmtiError MethodUtil::GetMaxLocals(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MethodUtil::GetMaxLocals([[maybe_unused]] jvmtiEnv* env, jmethodID method, jint* max_ptr) { if (method == nullptr) { @@ -380,7 +380,7 @@ jvmtiError MethodUtil::GetMethodName(jvmtiEnv* env, return ERR(NONE); } -jvmtiError MethodUtil::GetMethodDeclaringClass(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MethodUtil::GetMethodDeclaringClass([[maybe_unused]] jvmtiEnv* env, jmethodID method, jclass* declaring_class_ptr) { if (declaring_class_ptr == nullptr) { @@ -397,7 +397,7 @@ jvmtiError MethodUtil::GetMethodDeclaringClass(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError MethodUtil::GetMethodLocation(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MethodUtil::GetMethodLocation([[maybe_unused]] jvmtiEnv* env, jmethodID method, jlocation* start_location_ptr, jlocation* end_location_ptr) { @@ -430,7 +430,7 @@ jvmtiError MethodUtil::GetMethodLocation(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError MethodUtil::GetMethodModifiers(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MethodUtil::GetMethodModifiers([[maybe_unused]] jvmtiEnv* env, jmethodID method, jint* modifiers_ptr) { if (modifiers_ptr == nullptr) { @@ -507,7 +507,7 @@ jvmtiError MethodUtil::GetLineNumberTable(jvmtiEnv* env, } template <typename T> -static jvmtiError IsMethodT(jvmtiEnv* env ATTRIBUTE_UNUSED, +static jvmtiError IsMethodT([[maybe_unused]] jvmtiEnv* env, jmethodID method, T test, jboolean* is_t_ptr) { @@ -833,9 +833,9 @@ class GetLocalVariableClosure : public CommonLocalVariableClosure { return res; } - jvmtiError GetTypeErrorInner(art::ArtMethod* method ATTRIBUTE_UNUSED, + jvmtiError GetTypeErrorInner([[maybe_unused]] art::ArtMethod* method, SlotType slot_type, - const std::string& descriptor ATTRIBUTE_UNUSED) + [[maybe_unused]] const std::string& descriptor) REQUIRES_SHARED(art::Locks::mutator_lock_) { switch (type_) { case art::Primitive::kPrimFloat: @@ -1177,7 +1177,7 @@ class GetLocalInstanceClosure : public art::Closure { art::GcRoot<art::mirror::Object> val_; }; -jvmtiError MethodUtil::GetLocalInstance(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MethodUtil::GetLocalInstance([[maybe_unused]] jvmtiEnv* env, jthread thread, jint depth, jobject* data) { diff --git a/openjdkjvmti/ti_monitor.cc b/openjdkjvmti/ti_monitor.cc index f244cc114d..469693d37f 100644 --- a/openjdkjvmti/ti_monitor.cc +++ 
b/openjdkjvmti/ti_monitor.cc @@ -225,7 +225,7 @@ static JvmtiMonitor* DecodeMonitor(jrawMonitorID id) { return reinterpret_cast<JvmtiMonitor*>(id); } -jvmtiError MonitorUtil::CreateRawMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MonitorUtil::CreateRawMonitor([[maybe_unused]] jvmtiEnv* env, const char* name, jrawMonitorID* monitor_ptr) { if (name == nullptr || monitor_ptr == nullptr) { @@ -238,7 +238,7 @@ jvmtiError MonitorUtil::CreateRawMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError MonitorUtil::DestroyRawMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) { +jvmtiError MonitorUtil::DestroyRawMonitor([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) { if (id == nullptr) { return ERR(INVALID_MONITOR); } @@ -253,7 +253,7 @@ jvmtiError MonitorUtil::DestroyRawMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMo return ERR(NONE); } -jvmtiError MonitorUtil::RawMonitorEnterNoSuspend(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) { +jvmtiError MonitorUtil::RawMonitorEnterNoSuspend([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) { if (id == nullptr) { return ERR(INVALID_MONITOR); } @@ -266,7 +266,7 @@ jvmtiError MonitorUtil::RawMonitorEnterNoSuspend(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError MonitorUtil::RawMonitorEnter(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) { +jvmtiError MonitorUtil::RawMonitorEnter([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) { if (id == nullptr) { return ERR(INVALID_MONITOR); } @@ -279,7 +279,7 @@ jvmtiError MonitorUtil::RawMonitorEnter(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMoni return ERR(NONE); } -jvmtiError MonitorUtil::RawMonitorExit(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) { +jvmtiError MonitorUtil::RawMonitorExit([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) { if (id == nullptr) { return ERR(INVALID_MONITOR); } @@ -294,7 +294,7 @@ jvmtiError MonitorUtil::RawMonitorExit(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonit return ERR(NONE); } -jvmtiError MonitorUtil::RawMonitorWait(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MonitorUtil::RawMonitorWait([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id, jlong millis) { if (id == nullptr) { @@ -322,7 +322,7 @@ jvmtiError MonitorUtil::RawMonitorWait(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError MonitorUtil::RawMonitorNotify(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) { +jvmtiError MonitorUtil::RawMonitorNotify([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) { if (id == nullptr) { return ERR(INVALID_MONITOR); } @@ -337,7 +337,7 @@ jvmtiError MonitorUtil::RawMonitorNotify(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMon return ERR(NONE); } -jvmtiError MonitorUtil::RawMonitorNotifyAll(jvmtiEnv* env ATTRIBUTE_UNUSED, jrawMonitorID id) { +jvmtiError MonitorUtil::RawMonitorNotifyAll([[maybe_unused]] jvmtiEnv* env, jrawMonitorID id) { if (id == nullptr) { return ERR(INVALID_MONITOR); } @@ -352,7 +352,7 @@ jvmtiError MonitorUtil::RawMonitorNotifyAll(jvmtiEnv* env ATTRIBUTE_UNUSED, jraw return ERR(NONE); } -jvmtiError MonitorUtil::GetCurrentContendedMonitor(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError MonitorUtil::GetCurrentContendedMonitor([[maybe_unused]] jvmtiEnv* env, jthread thread, jobject* monitor) { if (monitor == nullptr) { diff --git a/openjdkjvmti/ti_object.cc b/openjdkjvmti/ti_object.cc index eb1140df4f..f37df86048 100644 --- a/openjdkjvmti/ti_object.cc +++ b/openjdkjvmti/ti_object.cc @@ -40,7 +40,7 @@ namespace openjdkjvmti { -jvmtiError ObjectUtil::GetObjectSize(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError 
ObjectUtil::GetObjectSize([[maybe_unused]] jvmtiEnv* env, jobject jobject, jlong* size_ptr) { if (jobject == nullptr) { @@ -57,7 +57,7 @@ jvmtiError ObjectUtil::GetObjectSize(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError ObjectUtil::GetObjectHashCode(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ObjectUtil::GetObjectHashCode([[maybe_unused]] jvmtiEnv* env, jobject jobject, jint* hash_code_ptr) { if (jobject == nullptr) { diff --git a/openjdkjvmti/ti_phase.cc b/openjdkjvmti/ti_phase.cc index 4fa97f10aa..89bf1aa384 100644 --- a/openjdkjvmti/ti_phase.cc +++ b/openjdkjvmti/ti_phase.cc @@ -97,7 +97,7 @@ struct PhaseUtil::PhaseCallback : public art::RuntimePhaseCallback { PhaseUtil::PhaseCallback gPhaseCallback; -jvmtiError PhaseUtil::GetPhase(jvmtiEnv* env ATTRIBUTE_UNUSED, jvmtiPhase* phase_ptr) { +jvmtiError PhaseUtil::GetPhase([[maybe_unused]] jvmtiEnv* env, jvmtiPhase* phase_ptr) { if (phase_ptr == nullptr) { return ERR(NULL_POINTER); } diff --git a/openjdkjvmti/ti_properties.cc b/openjdkjvmti/ti_properties.cc index 4fb3070e93..c6490c3193 100644 --- a/openjdkjvmti/ti_properties.cc +++ b/openjdkjvmti/ti_properties.cc @@ -226,9 +226,9 @@ jvmtiError PropertiesUtil::GetSystemProperty(jvmtiEnv* env, return ERR(NOT_AVAILABLE); } -jvmtiError PropertiesUtil::SetSystemProperty(jvmtiEnv* env ATTRIBUTE_UNUSED, - const char* property ATTRIBUTE_UNUSED, - const char* value ATTRIBUTE_UNUSED) { +jvmtiError PropertiesUtil::SetSystemProperty([[maybe_unused]] jvmtiEnv* env, + [[maybe_unused]] const char* property, + [[maybe_unused]] const char* value) { // We do not allow manipulation of any property here. return ERR(NOT_AVAILABLE); } diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc index aafca47605..dfcbeb42e3 100644 --- a/openjdkjvmti/ti_redefine.cc +++ b/openjdkjvmti/ti_redefine.cc @@ -2372,9 +2372,9 @@ class ClassDefinitionPauser : public art::ClassLoadCallback { } } - void ClassLoad(art::Handle<art::mirror::Class> klass ATTRIBUTE_UNUSED) override {} - void ClassPrepare(art::Handle<art::mirror::Class> klass1 ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Class> klass2 ATTRIBUTE_UNUSED) override {} + void ClassLoad([[maybe_unused]] art::Handle<art::mirror::Class> klass) override {} + void ClassPrepare([[maybe_unused]] art::Handle<art::mirror::Class> klass1, + [[maybe_unused]] art::Handle<art::mirror::Class> klass2) override {} void SetRunning() { is_running_ = true; diff --git a/openjdkjvmti/ti_stack.cc b/openjdkjvmti/ti_stack.cc index 8ee4adb853..9af8861260 100644 --- a/openjdkjvmti/ti_stack.cc +++ b/openjdkjvmti/ti_stack.cc @@ -716,7 +716,7 @@ struct GetFrameCountClosure : public art::Closure { size_t count; }; -jvmtiError StackUtil::GetFrameCount(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError StackUtil::GetFrameCount([[maybe_unused]] jvmtiEnv* env, jthread java_thread, jint* count_ptr) { // It is not great that we have to hold these locks for so long, but it is necessary to ensure @@ -784,7 +784,7 @@ struct GetLocationClosure : public art::Closure { uint32_t dex_pc; }; -jvmtiError StackUtil::GetFrameLocation(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError StackUtil::GetFrameLocation([[maybe_unused]] jvmtiEnv* env, jthread java_thread, jint depth, jmethodID* method_ptr, @@ -877,8 +877,8 @@ struct MonitorVisitor : public art::StackVisitor, public art::SingleRootVisitor visitor->stack_depths.push_back(visitor->current_stack_depth); } - void VisitRoot(art::mirror::Object* obj, const art::RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { + 
void VisitRoot(art::mirror::Object* obj, [[maybe_unused]] const art::RootInfo& info) override + REQUIRES_SHARED(art::Locks::mutator_lock_) { for (const art::Handle<art::mirror::Object>& m : monitors) { if (m.Get() == obj) { return; @@ -1219,7 +1219,7 @@ class NonStandardExitFrames { template <> bool NonStandardExitFrames<NonStandardExitType::kForceReturn>::CheckFunctions( - jvmtiEnv* env, art::ArtMethod* calling ATTRIBUTE_UNUSED, art::ArtMethod* called) { + jvmtiEnv* env, [[maybe_unused]] art::ArtMethod* calling, art::ArtMethod* called) { if (UNLIKELY(called->IsNative())) { result_ = ERR(OPAQUE_FRAME); JVMTI_LOG(INFO, env) << "Cannot force early return from " << called->PrettyMethod() @@ -1297,7 +1297,7 @@ void AddDelayedMethodExitEvent(EventHandler* handler, art::ShadowFrame* frame, T template <> void AddDelayedMethodExitEvent<std::nullptr_t>(EventHandler* handler, art::ShadowFrame* frame, - std::nullptr_t null_val ATTRIBUTE_UNUSED) { + [[maybe_unused]] std::nullptr_t null_val) { jvalue jval; memset(&jval, 0, sizeof(jval)); handler->AddDelayedNonStandardExitEvent(frame, false, jval); @@ -1316,13 +1316,13 @@ bool ValidReturnType(art::Thread* self, art::ObjPtr<art::mirror::Class> return_t REQUIRES_SHARED(art::Locks::mutator_lock_) REQUIRES(art::Locks::user_code_suspension_lock_, art::Locks::thread_list_lock_); -#define SIMPLE_VALID_RETURN_TYPE(type, ...) \ - template <> \ - bool ValidReturnType<type>(art::Thread * self ATTRIBUTE_UNUSED, \ - art::ObjPtr<art::mirror::Class> return_type, \ - type value ATTRIBUTE_UNUSED) { \ - static constexpr std::initializer_list<art::Primitive::Type> types{ __VA_ARGS__ }; \ - return std::find(types.begin(), types.end(), return_type->GetPrimitiveType()) != types.end(); \ +#define SIMPLE_VALID_RETURN_TYPE(type, ...) \ + template <> \ + bool ValidReturnType<type>([[maybe_unused]] art::Thread * self, \ + art::ObjPtr<art::mirror::Class> return_type, \ + [[maybe_unused]] type value) { \ + static constexpr std::initializer_list<art::Primitive::Type> types{__VA_ARGS__}; \ + return std::find(types.begin(), types.end(), return_type->GetPrimitiveType()) != types.end(); \ } SIMPLE_VALID_RETURN_TYPE(jlong, art::Primitive::kPrimLong); diff --git a/openjdkjvmti/ti_thread.cc b/openjdkjvmti/ti_thread.cc index b5bc35e7e6..13eebbff04 100644 --- a/openjdkjvmti/ti_thread.cc +++ b/openjdkjvmti/ti_thread.cc @@ -205,7 +205,7 @@ void ThreadUtil::Unregister() { runtime->GetRuntimeCallbacks()->RemoveThreadLifecycleCallback(&gThreadCallback); } -jvmtiError ThreadUtil::GetCurrentThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread* thread_ptr) { +jvmtiError ThreadUtil::GetCurrentThread([[maybe_unused]] jvmtiEnv* env, jthread* thread_ptr) { art::Thread* self = art::Thread::Current(); art::ScopedObjectAccess soa(self); @@ -564,7 +564,7 @@ bool ThreadUtil::WouldSuspendForUserCode(art::Thread* self) { return WouldSuspendForUserCodeLocked(self); } -jvmtiError ThreadUtil::GetThreadState(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ThreadUtil::GetThreadState([[maybe_unused]] jvmtiEnv* env, jthread thread, jint* thread_state_ptr) { if (thread_state_ptr == nullptr) { @@ -940,7 +940,7 @@ jvmtiError ThreadUtil::SuspendSelf(art::Thread* self) { return OK; } -jvmtiError ThreadUtil::SuspendThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread thread) { +jvmtiError ThreadUtil::SuspendThread([[maybe_unused]] jvmtiEnv* env, jthread thread) { art::Thread* self = art::Thread::Current(); bool target_is_self = false; { @@ -961,8 +961,7 @@ jvmtiError ThreadUtil::SuspendThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread thr } 
} -jvmtiError ThreadUtil::ResumeThread(jvmtiEnv* env ATTRIBUTE_UNUSED, - jthread thread) { +jvmtiError ThreadUtil::ResumeThread([[maybe_unused]] jvmtiEnv* env, jthread thread) { if (thread == nullptr) { return ERR(NULL_POINTER); } @@ -1079,7 +1078,7 @@ jvmtiError ThreadUtil::ResumeThreadList(jvmtiEnv* env, return OK; } -jvmtiError ThreadUtil::StopThread(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError ThreadUtil::StopThread([[maybe_unused]] jvmtiEnv* env, jthread thread, jobject exception) { art::Thread* self = art::Thread::Current(); @@ -1128,7 +1127,7 @@ jvmtiError ThreadUtil::StopThread(jvmtiEnv* env ATTRIBUTE_UNUSED, } } -jvmtiError ThreadUtil::InterruptThread(jvmtiEnv* env ATTRIBUTE_UNUSED, jthread thread) { +jvmtiError ThreadUtil::InterruptThread([[maybe_unused]] jvmtiEnv* env, jthread thread) { art::Thread* self = art::Thread::Current(); art::ScopedObjectAccess soa(self); art::MutexLock tll_mu(self, *art::Locks::thread_list_lock_); diff --git a/openjdkjvmti/ti_timers.cc b/openjdkjvmti/ti_timers.cc index 11b58c452e..f02501fb1e 100644 --- a/openjdkjvmti/ti_timers.cc +++ b/openjdkjvmti/ti_timers.cc @@ -45,7 +45,7 @@ namespace openjdkjvmti { -jvmtiError TimerUtil::GetAvailableProcessors(jvmtiEnv* env ATTRIBUTE_UNUSED, +jvmtiError TimerUtil::GetAvailableProcessors([[maybe_unused]] jvmtiEnv* env, jint* processor_count_ptr) { if (processor_count_ptr == nullptr) { return ERR(NULL_POINTER); @@ -56,7 +56,7 @@ jvmtiError TimerUtil::GetAvailableProcessors(jvmtiEnv* env ATTRIBUTE_UNUSED, return ERR(NONE); } -jvmtiError TimerUtil::GetTimerInfo(jvmtiEnv* env ATTRIBUTE_UNUSED, jvmtiTimerInfo* info_ptr) { +jvmtiError TimerUtil::GetTimerInfo([[maybe_unused]] jvmtiEnv* env, jvmtiTimerInfo* info_ptr) { if (info_ptr == nullptr) { return ERR(NULL_POINTER); } @@ -69,7 +69,7 @@ jvmtiError TimerUtil::GetTimerInfo(jvmtiEnv* env ATTRIBUTE_UNUSED, jvmtiTimerInf return ERR(NONE); } -jvmtiError TimerUtil::GetTime(jvmtiEnv* env ATTRIBUTE_UNUSED, jlong* nanos_ptr) { +jvmtiError TimerUtil::GetTime([[maybe_unused]] jvmtiEnv* env, jlong* nanos_ptr) { if (nanos_ptr == nullptr) { return ERR(NULL_POINTER); } diff --git a/perfetto_hprof/perfetto_hprof.cc b/perfetto_hprof/perfetto_hprof.cc index 906362ab29..a6ad4a4976 100644 --- a/perfetto_hprof/perfetto_hprof.cc +++ b/perfetto_hprof/perfetto_hprof.cc @@ -438,10 +438,10 @@ class ReferredObjectsFinder { referred_objects_->emplace_back(std::move(field_name), ref); } - void VisitRootIfNonNull(art::mirror::CompressedReference<art::mirror::Object>* root - ATTRIBUTE_UNUSED) const {} - void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root - ATTRIBUTE_UNUSED) const {} + void VisitRootIfNonNull( + [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const {} + void VisitRoot( + [[maybe_unused]] art::mirror::CompressedReference<art::mirror::Object>* root) const {} private: // We can use a raw Object* pointer here, because there are no concurrent GC threads after the diff --git a/runtime/aot_class_linker.h b/runtime/aot_class_linker.h index 30a19c87d3..be4ab2b938 100644 --- a/runtime/aot_class_linker.h +++ b/runtime/aot_class_linker.h @@ -39,11 +39,11 @@ static bool CanReferenceInBootImageExtension(ObjPtr<mirror::Class> klass, gc::He void SetSdkChecker(std::unique_ptr<SdkChecker>&& sdk_checker_); const SdkChecker* GetSdkChecker() const; - bool DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const override + bool DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const override 
REQUIRES_SHARED(Locks::mutator_lock_); - bool DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const override + bool DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const override REQUIRES_SHARED(Locks::mutator_lock_); - bool DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const override; + bool DenyAccessBasedOnPublicSdk([[maybe_unused]] const char* type_descriptor) const override; void SetEnablePublicSdkChecks(bool enabled) override; protected: diff --git a/runtime/arch/arm/fault_handler_arm.cc b/runtime/arch/arm/fault_handler_arm.cc index f02ec65f82..bf3eaa7bf2 100644 --- a/runtime/arch/arm/fault_handler_arm.cc +++ b/runtime/arch/arm/fault_handler_arm.cc @@ -45,7 +45,7 @@ static uint32_t GetInstructionSize(uint8_t* pc) { return instr_size; } -uintptr_t FaultManager::GetFaultPc(siginfo_t* siginfo ATTRIBUTE_UNUSED, void* context) { +uintptr_t FaultManager::GetFaultPc([[maybe_unused]] siginfo_t* siginfo, void* context) { ucontext_t* uc = reinterpret_cast<ucontext_t*>(context); mcontext_t* mc = reinterpret_cast<mcontext_t*>(&uc->uc_mcontext); if (mc->arm_sp == 0) { @@ -61,7 +61,7 @@ uintptr_t FaultManager::GetFaultSp(void* context) { return mc->arm_sp; } -bool NullPointerHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info, void* context) { +bool NullPointerHandler::Action([[maybe_unused]] int sig, siginfo_t* info, void* context) { uintptr_t fault_address = reinterpret_cast<uintptr_t>(info->si_addr); if (!IsValidFaultAddress(fault_address)) { return false; @@ -115,7 +115,8 @@ bool NullPointerHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info, void* // The offset from r9 is Thread::ThreadSuspendTriggerOffset(). // To check for a suspend check, we examine the instructions that caused // the fault (at PC-4 and PC). -bool SuspensionHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED, +bool SuspensionHandler::Action([[maybe_unused]] int sig, + [[maybe_unused]] siginfo_t* info, void* context) { // These are the instructions to check for. The first one is the ldr r0,[r9,#xxx] // where xxx is the offset of the suspend trigger. @@ -186,7 +187,8 @@ bool SuspensionHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBU // If we determine this is a stack overflow we need to move the stack pointer // to the overflow region below the protected region. -bool StackOverflowHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED, +bool StackOverflowHandler::Action([[maybe_unused]] int sig, + [[maybe_unused]] siginfo_t* info, void* context) { ucontext_t* uc = reinterpret_cast<ucontext_t*>(context); mcontext_t* mc = reinterpret_cast<mcontext_t*>(&uc->uc_mcontext); diff --git a/runtime/arch/arm/instruction_set_features_arm.cc b/runtime/arch/arm/instruction_set_features_arm.cc index 749476b703..3309523ba8 100644 --- a/runtime/arch/arm/instruction_set_features_arm.cc +++ b/runtime/arch/arm/instruction_set_features_arm.cc @@ -243,9 +243,9 @@ ArmFeaturesUniquePtr ArmInstructionSetFeatures::FromHwcap() { // A signal handler called by a fault for an illegal instruction. We record the fact in r0 // and then increment the PC in the signal context to return to the next instruction. We know the // instruction is 4 bytes long. 
-static void bad_instr_handle(int signo ATTRIBUTE_UNUSED, - siginfo_t* si ATTRIBUTE_UNUSED, - void* data) { +static void bad_instr_handle([[maybe_unused]] int signo, + [[maybe_unused]] siginfo_t* si, + void* data) { #if defined(__arm__) ucontext_t* uc = reinterpret_cast<ucontext_t*>(data); mcontext_t* mc = &uc->uc_mcontext; diff --git a/runtime/arch/arm64/fault_handler_arm64.cc b/runtime/arch/arm64/fault_handler_arm64.cc index 3878b573dd..cebff9b9fe 100644 --- a/runtime/arch/arm64/fault_handler_arm64.cc +++ b/runtime/arch/arm64/fault_handler_arm64.cc @@ -62,7 +62,7 @@ uintptr_t FaultManager::GetFaultSp(void* context) { return mc->sp; } -bool NullPointerHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info, void* context) { +bool NullPointerHandler::Action([[maybe_unused]] int sig, siginfo_t* info, void* context) { uintptr_t fault_address = reinterpret_cast<uintptr_t>(info->si_addr); if (!IsValidFaultAddress(fault_address)) { return false; @@ -96,7 +96,8 @@ bool NullPointerHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info, void* // A suspend check is done using the following instruction: // 0x...: f94002b5 ldr x21, [x21, #0] // To check for a suspend check, we examine the instruction that caused the fault (at PC). -bool SuspensionHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED, +bool SuspensionHandler::Action([[maybe_unused]] int sig, + [[maybe_unused]] siginfo_t* info, void* context) { constexpr uint32_t kSuspendCheckRegister = 21; constexpr uint32_t checkinst = @@ -128,7 +129,8 @@ bool SuspensionHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBU return true; } -bool StackOverflowHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED, +bool StackOverflowHandler::Action([[maybe_unused]] int sig, + [[maybe_unused]] siginfo_t* info, void* context) { ucontext_t* uc = reinterpret_cast<ucontext_t*>(context); mcontext_t* mc = reinterpret_cast<mcontext_t*>(&uc->uc_mcontext); diff --git a/runtime/arch/context.h b/runtime/arch/context.h index be7adc7ca1..efeacd65a8 100644 --- a/runtime/arch/context.h +++ b/runtime/arch/context.h @@ -90,9 +90,7 @@ class Context { // Set `new_value` to the physical register containing the dex PC pointer in // an nterp frame. - virtual void SetNterpDexPC(uintptr_t new_value ATTRIBUTE_UNUSED) { - abort(); - } + virtual void SetNterpDexPC([[maybe_unused]] uintptr_t new_value) { abort(); } // Switches execution of the executing context to this context NO_RETURN virtual void DoLongJump() = 0; diff --git a/runtime/arch/instruction_set_features.cc b/runtime/arch/instruction_set_features.cc index d88c544b35..17b9dc39bf 100644 --- a/runtime/arch/instruction_set_features.cc +++ b/runtime/arch/instruction_set_features.cc @@ -313,7 +313,7 @@ bool InstructionSetFeatures::FindVariantInArray(const char* const variants[], si } std::unique_ptr<const InstructionSetFeatures> InstructionSetFeatures::AddRuntimeDetectedFeatures( - const InstructionSetFeatures *features ATTRIBUTE_UNUSED) const { + [[maybe_unused]] const InstructionSetFeatures* features) const { UNIMPLEMENTED(FATAL) << kRuntimeISA; UNREACHABLE(); } diff --git a/runtime/arch/instruction_set_features.h b/runtime/arch/instruction_set_features.h index 1f41b39023..1cb0fbb758 100644 --- a/runtime/arch/instruction_set_features.h +++ b/runtime/arch/instruction_set_features.h @@ -146,8 +146,8 @@ class InstructionSetFeatures { std::string* error_msg) const = 0; // Add run-time detected architecture specific features in sub-classes. 
- virtual std::unique_ptr<const InstructionSetFeatures> - AddRuntimeDetectedFeatures(const InstructionSetFeatures *features ATTRIBUTE_UNUSED) const; + virtual std::unique_ptr<const InstructionSetFeatures> AddRuntimeDetectedFeatures( + [[maybe_unused]] const InstructionSetFeatures* features) const; private: DISALLOW_COPY_AND_ASSIGN(InstructionSetFeatures); diff --git a/runtime/arch/riscv64/instruction_set_features_riscv64.cc b/runtime/arch/riscv64/instruction_set_features_riscv64.cc index 2ef4f8493d..544b717c3f 100644 --- a/runtime/arch/riscv64/instruction_set_features_riscv64.cc +++ b/runtime/arch/riscv64/instruction_set_features_riscv64.cc @@ -30,7 +30,7 @@ constexpr uint32_t BasicFeatures() { } Riscv64FeaturesUniquePtr Riscv64InstructionSetFeatures::FromVariant( - const std::string& variant, std::string* error_msg ATTRIBUTE_UNUSED) { + const std::string& variant, [[maybe_unused]] std::string* error_msg) { if (variant != "generic") { LOG(WARNING) << "Unexpected CPU variant for Riscv64 using defaults: " << variant; } @@ -90,8 +90,8 @@ std::string Riscv64InstructionSetFeatures::GetFeatureString() const { std::unique_ptr<const InstructionSetFeatures> Riscv64InstructionSetFeatures::AddFeaturesFromSplitString( - const std::vector<std::string>& features ATTRIBUTE_UNUSED, - std::string* error_msg ATTRIBUTE_UNUSED) const { + [[maybe_unused]] const std::vector<std::string>& features, + [[maybe_unused]] std::string* error_msg) const { UNIMPLEMENTED(WARNING); return std::unique_ptr<const InstructionSetFeatures>(new Riscv64InstructionSetFeatures(bits_)); } diff --git a/runtime/arch/x86/fault_handler_x86.cc b/runtime/arch/x86/fault_handler_x86.cc index cd2d38f337..efc5249582 100644 --- a/runtime/arch/x86/fault_handler_x86.cc +++ b/runtime/arch/x86/fault_handler_x86.cc @@ -259,7 +259,7 @@ static uint32_t GetInstructionSize(const uint8_t* pc, size_t bytes) { #undef FETCH_OR_SKIP_BYTE } -uintptr_t FaultManager::GetFaultPc(siginfo_t* siginfo ATTRIBUTE_UNUSED, void* context) { +uintptr_t FaultManager::GetFaultPc([[maybe_unused]] siginfo_t* siginfo, void* context) { ucontext_t* uc = reinterpret_cast<ucontext_t*>(context); if (uc->CTX_ESP == 0) { VLOG(signals) << "Missing SP"; diff --git a/runtime/arch/x86/instruction_set_features_x86.cc b/runtime/arch/x86/instruction_set_features_x86.cc index f11aca93ee..6976e9c234 100644 --- a/runtime/arch/x86/instruction_set_features_x86.cc +++ b/runtime/arch/x86/instruction_set_features_x86.cc @@ -119,9 +119,9 @@ X86FeaturesUniquePtr X86InstructionSetFeatures::Create(bool x86_64, } } -X86FeaturesUniquePtr X86InstructionSetFeatures::FromVariant( - const std::string& variant, std::string* error_msg ATTRIBUTE_UNUSED, - bool x86_64) { +X86FeaturesUniquePtr X86InstructionSetFeatures::FromVariant(const std::string& variant, + [[maybe_unused]] std::string* error_msg, + bool x86_64) { const bool is_runtime_isa = kRuntimeISA == (x86_64 ? InstructionSet::kX86_64 : InstructionSet::kX86); if (is_runtime_isa && variant == "default") { diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h index 3ea5130162..7353b149ac 100644 --- a/runtime/art_method-inl.h +++ b/runtime/art_method-inl.h @@ -52,7 +52,7 @@ namespace detail { template <> struct ShortyTraits<'V'> { using Type = void; - static Type Get(const JValue& value ATTRIBUTE_UNUSED) {} + static Type Get([[maybe_unused]] const JValue& value) {} // `kVRegCount` and `Set()` are not defined. }; @@ -152,8 +152,8 @@ constexpr size_t NumberOfVRegs() { } template <char... 
ArgType> -inline ALWAYS_INLINE void FillVRegs(uint32_t* vregs ATTRIBUTE_UNUSED, - typename ShortyTraits<ArgType>::Type... args ATTRIBUTE_UNUSED) +inline ALWAYS_INLINE void FillVRegs([[maybe_unused]] uint32_t* vregs, + [[maybe_unused]] typename ShortyTraits<ArgType>::Type... args) REQUIRES_SHARED(Locks::mutator_lock_) {} template <char FirstArgType, char... ArgType> diff --git a/runtime/base/quasi_atomic.h b/runtime/base/quasi_atomic.h index 5aa4ddea25..95d7bb21be 100644 --- a/runtime/base/quasi_atomic.h +++ b/runtime/base/quasi_atomic.h @@ -46,7 +46,7 @@ class Mutex; // quasiatomic operations that are performed on partially-overlapping // memory. class QuasiAtomic { - static constexpr bool NeedSwapMutexes(InstructionSet isa ATTRIBUTE_UNUSED) { + static constexpr bool NeedSwapMutexes([[maybe_unused]] InstructionSet isa) { // TODO: Remove this function now that mips support has been removed. return false; } diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index fca86a5a48..f3562a4cb5 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -1496,16 +1496,14 @@ class CountInternedStringReferencesVisitor { // Visit Class Fields void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { // References within image or across images don't need a read barrier. ObjPtr<mirror::Object> referred_obj = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset); TestObject(referred_obj); } - void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref) const + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false); } @@ -3340,7 +3338,7 @@ struct ScopedDefiningClass { return Finish(h_klass); } - ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED) + ObjPtr<mirror::Class> Finish([[maybe_unused]] nullptr_t np) REQUIRES_SHARED(Locks::mutator_lock_) { ScopedNullHandle<mirror::Class> snh; return Finish(snh); @@ -7361,8 +7359,8 @@ class ClassLinker::LinkMethodsHelper { class VTableIndexCheckerRelease { protected: - explicit VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED) {} - void CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const {} + explicit VTableIndexCheckerRelease([[maybe_unused]] size_t vtable_length) {} + void CheckIndex([[maybe_unused]] uint32_t index) const {} }; using VTableIndexChecker = @@ -10927,27 +10925,27 @@ ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thr Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result)); } -bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const +bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const REQUIRES_SHARED(Locks::mutator_lock_) { // Should not be called on ClassLinker, only on AotClassLinker that overrides this. LOG(FATAL) << "UNREACHABLE"; UNREACHABLE(); } -bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const +bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const REQUIRES_SHARED(Locks::mutator_lock_) { // Should not be called on ClassLinker, only on AotClassLinker that overrides this. 
LOG(FATAL) << "UNREACHABLE"; UNREACHABLE(); } -bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const { +bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] const char* type_descriptor) const { // Should not be called on ClassLinker, only on AotClassLinker that overrides this. LOG(FATAL) << "UNREACHABLE"; UNREACHABLE(); } -void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) { +void ClassLinker::SetEnablePublicSdkChecks([[maybe_unused]] bool enabled) { // Should not be called on ClassLinker, only on AotClassLinker that overrides this. LOG(FATAL) << "UNREACHABLE"; UNREACHABLE(); diff --git a/runtime/class_linker.h b/runtime/class_linker.h index d14e46a4c1..6fdd94d8dc 100644 --- a/runtime/class_linker.h +++ b/runtime/class_linker.h @@ -1414,13 +1414,13 @@ class ClassLoadCallback { // different object. It is the listener's responsibility to handle this. // Note: This callback is rarely useful so a default implementation has been given that does // nothing. - virtual void ClassPreDefine(const char* descriptor ATTRIBUTE_UNUSED, - Handle<mirror::Class> klass ATTRIBUTE_UNUSED, - Handle<mirror::ClassLoader> class_loader ATTRIBUTE_UNUSED, - const DexFile& initial_dex_file ATTRIBUTE_UNUSED, - const dex::ClassDef& initial_class_def ATTRIBUTE_UNUSED, - /*out*/DexFile const** final_dex_file ATTRIBUTE_UNUSED, - /*out*/dex::ClassDef const** final_class_def ATTRIBUTE_UNUSED) + virtual void ClassPreDefine([[maybe_unused]] const char* descriptor, + [[maybe_unused]] Handle<mirror::Class> klass, + [[maybe_unused]] Handle<mirror::ClassLoader> class_loader, + [[maybe_unused]] const DexFile& initial_dex_file, + [[maybe_unused]] const dex::ClassDef& initial_class_def, + [[maybe_unused]] /*out*/ DexFile const** final_dex_file, + [[maybe_unused]] /*out*/ dex::ClassDef const** final_class_def) REQUIRES_SHARED(Locks::mutator_lock_) {} // A class has been loaded. diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc index 95b224f606..981f5ea1d6 100644 --- a/runtime/class_linker_test.cc +++ b/runtime/class_linker_test.cc @@ -443,7 +443,7 @@ class ClassLinkerTest : public CommonRuntimeTest { class TestRootVisitor : public SingleRootVisitor { public: - void VisitRoot(mirror::Object* root, const RootInfo& info ATTRIBUTE_UNUSED) override { + void VisitRoot(mirror::Object* root, [[maybe_unused]] const RootInfo& info) override { EXPECT_TRUE(root != nullptr); } }; diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h index 85c48a240c..0f82c4bccf 100644 --- a/runtime/common_runtime_test.h +++ b/runtime/common_runtime_test.h @@ -136,7 +136,7 @@ class CommonRuntimeTestImpl : public CommonArtTestImpl { protected: // Allow subclases such as CommonCompilerTest to add extra options. - virtual void SetUpRuntimeOptions(RuntimeOptions* options ATTRIBUTE_UNUSED) {} + virtual void SetUpRuntimeOptions([[maybe_unused]] RuntimeOptions* options) {} // Called before the runtime is created. 
virtual void PreRuntimeCreate() {} diff --git a/runtime/compiler_callbacks.h b/runtime/compiler_callbacks.h index f76ee660e0..3fa2fa3022 100644 --- a/runtime/compiler_callbacks.h +++ b/runtime/compiler_callbacks.h @@ -52,25 +52,25 @@ class CompilerCallbacks { virtual void ClassRejected(ClassReference ref) = 0; virtual verifier::VerifierDeps* GetVerifierDeps() const = 0; - virtual void SetVerifierDeps(verifier::VerifierDeps* deps ATTRIBUTE_UNUSED) {} + virtual void SetVerifierDeps([[maybe_unused]] verifier::VerifierDeps* deps) {} // Return the class status of a previous stage of the compilation. This can be used, for example, // when class unloading is enabled during multidex compilation. - virtual ClassStatus GetPreviousClassState(ClassReference ref ATTRIBUTE_UNUSED) { + virtual ClassStatus GetPreviousClassState([[maybe_unused]] ClassReference ref) { return ClassStatus::kNotReady; } - virtual void SetDoesClassUnloading(bool does_class_unloading ATTRIBUTE_UNUSED, - CompilerDriver* compiler_driver ATTRIBUTE_UNUSED) {} + virtual void SetDoesClassUnloading([[maybe_unused]] bool does_class_unloading, + [[maybe_unused]] CompilerDriver* compiler_driver) {} bool IsBootImage() { return mode_ == CallbackMode::kCompileBootImage; } - virtual void UpdateClassState(ClassReference ref ATTRIBUTE_UNUSED, - ClassStatus state ATTRIBUTE_UNUSED) {} + virtual void UpdateClassState([[maybe_unused]] ClassReference ref, + [[maybe_unused]] ClassStatus state) {} - virtual bool CanUseOatStatusForVerification(mirror::Class* klass ATTRIBUTE_UNUSED) + virtual bool CanUseOatStatusForVerification([[maybe_unused]] mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_) { return false; } diff --git a/runtime/elf_file.cc b/runtime/elf_file.cc index 0b4f1f3be2..2ceda1245f 100644 --- a/runtime/elf_file.cc +++ b/runtime/elf_file.cc @@ -1076,7 +1076,7 @@ bool ElfFileImpl<ElfTypes>::GetLoadedAddressRange(/*out*/uint8_t** vaddr_begin, } static InstructionSet GetInstructionSetFromELF(uint16_t e_machine, - uint32_t e_flags ATTRIBUTE_UNUSED) { + [[maybe_unused]] uint32_t e_flags) { switch (e_machine) { case EM_ARM: return InstructionSet::kArm; diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc index e2fc232670..fb32c95fff 100644 --- a/runtime/entrypoints/quick/quick_field_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc @@ -420,7 +420,7 @@ extern "C" mirror::Object* artReadBarrierMark(mirror::Object* obj) { return ReadBarrier::Mark(obj); } -extern "C" mirror::Object* artReadBarrierSlow(mirror::Object* ref ATTRIBUTE_UNUSED, +extern "C" mirror::Object* artReadBarrierSlow([[maybe_unused]] mirror::Object* ref, mirror::Object* obj, uint32_t offset) { // Used only in connection with non-volatile loads. 
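[Editorial aside — not part of the change.] For readers unfamiliar with the attribute this change adopts, here is a minimal standalone sketch (invented code, not ART code) of how C++17's [[maybe_unused]] replaces a trailing macro such as ATTRIBUTE_UNUSED, which is assumed here to have expanded to the GCC/Clang __attribute__((unused)). The standard attribute is written before the entity it applies to and suppresses unused-entity warnings on parameters, locals, and data members alike, which is exactly the substitution repeated throughout the hunks above and below.

    // sketch.cc — illustrative only; build with: clang++ -std=c++17 -Wall -Wextra -c sketch.cc
    #include <cstdint>

    // Old style (assumed macro definition): placed after the declarator.
    //   #define ATTRIBUTE_UNUSED __attribute__((__unused__))
    //   int Run(int argc ATTRIBUTE_UNUSED, char** argv);

    // New style: the standard attribute, placed before the parameter.
    int Run([[maybe_unused]] int argc, char** argv) {
      // Also valid on locals that are only read in some build configurations.
      [[maybe_unused]] bool have_args = (argv != nullptr);
    #ifndef NDEBUG
      if (have_args) { /* extra checks in debug builds only */ }
    #endif
      return 0;
    }

    // And on data members, e.g. explicit padding fields kept for layout.
    struct Padded {
      uint32_t size_;
      [[maybe_unused]] uint32_t padding_;  // no -Wunused-private-field style warning
    };
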
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc index 7e96f2947c..905cee2dfb 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc @@ -1634,9 +1634,8 @@ class ComputeNativeCallFrameSize { } virtual void WalkHeader( - BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED) - REQUIRES_SHARED(Locks::mutator_lock_) { - } + [[maybe_unused]] BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) + REQUIRES_SHARED(Locks::mutator_lock_) {} void Walk(const char* shorty, uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) { BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this); @@ -2174,10 +2173,8 @@ extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck( } extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck( - uint32_t method_idx, - mirror::Object* this_object ATTRIBUTE_UNUSED, - Thread* self, - ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) { + uint32_t method_idx, [[maybe_unused]] mirror::Object* this_object, Thread* self, ArtMethod** sp) + REQUIRES_SHARED(Locks::mutator_lock_) { // For static, this_object is not required and may be random garbage. Don't pass it down so that // it doesn't cause ObjPtr alignment failure check. return artInvokeCommon<kStatic>(method_idx, nullptr, self, sp); diff --git a/runtime/fault_handler.cc b/runtime/fault_handler.cc index a3c1f3bdf5..7e5e745eef 100644 --- a/runtime/fault_handler.cc +++ b/runtime/fault_handler.cc @@ -223,7 +223,7 @@ bool FaultManager::HandleFaultByOtherHandlers(int sig, siginfo_t* info, void* co return false; } -bool FaultManager::HandleSigbusFault(int sig, siginfo_t* info, void* context ATTRIBUTE_UNUSED) { +bool FaultManager::HandleSigbusFault(int sig, siginfo_t* info, [[maybe_unused]] void* context) { DCHECK_EQ(sig, SIGBUS); if (VLOG_IS_ON(signals)) { PrintSignalInfo(VLOG_STREAM(signals) << "Handling SIGBUS fault:\n", info); @@ -578,7 +578,7 @@ JavaStackTraceHandler::JavaStackTraceHandler(FaultManager* manager) : FaultHandl manager_->AddHandler(this, false); } -bool JavaStackTraceHandler::Action(int sig ATTRIBUTE_UNUSED, siginfo_t* siginfo, void* context) { +bool JavaStackTraceHandler::Action([[maybe_unused]] int sig, siginfo_t* siginfo, void* context) { // Make sure that we are in the generated code, but we may not have a dex pc. bool in_generated_code = manager_->IsInGeneratedCode(siginfo, context); if (in_generated_code) { diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc index 4a84799431..85f7164b58 100644 --- a/runtime/gc/accounting/mod_union_table.cc +++ b/runtime/gc/accounting/mod_union_table.cc @@ -43,7 +43,7 @@ class ModUnionAddToCardSetVisitor { inline void operator()(uint8_t* card, uint8_t expected_value, - uint8_t new_value ATTRIBUTE_UNUSED) const { + [[maybe_unused]] uint8_t new_value) const { if (expected_value == CardTable::kCardDirty) { cleared_cards_->insert(card); } @@ -60,7 +60,7 @@ class ModUnionAddToCardBitmapVisitor { inline void operator()(uint8_t* card, uint8_t expected_value, - uint8_t new_value ATTRIBUTE_UNUSED) const { + [[maybe_unused]] uint8_t new_value) const { if (expected_value == CardTable::kCardDirty) { // We want the address the card represents, not the address of the card. 
bitmap_->Set(reinterpret_cast<uintptr_t>(card_table_->AddrFromCard(card))); @@ -78,7 +78,7 @@ class ModUnionAddToCardVectorVisitor { : cleared_cards_(cleared_cards) { } - void operator()(uint8_t* card, uint8_t expected_card, uint8_t new_card ATTRIBUTE_UNUSED) const { + void operator()(uint8_t* card, uint8_t expected_card, [[maybe_unused]] uint8_t new_card) const { if (expected_card == CardTable::kCardDirty) { cleared_cards_->push_back(card); } @@ -100,7 +100,7 @@ class ModUnionUpdateObjectReferencesVisitor { contains_reference_to_other_space_(contains_reference_to_other_space) {} // Extra parameters are required since we use this same visitor signature for checking objects. - void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const + void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { MarkReference(obj->GetFieldObjectReferenceAddr(offset)); } @@ -195,7 +195,7 @@ class AddToReferenceArrayVisitor { has_target_reference_(has_target_reference) {} // Extra parameters are required since we use this same visitor signature for checking objects. - void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const + void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset); mirror::Object* ref = ref_ptr->AsMirrorPtr(); @@ -270,7 +270,7 @@ class CheckReferenceVisitor { references_(references) {} // Extra parameters are required since we use this same visitor signature for checking objects. - void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const + void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) { mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset); if (ref != nullptr && diff --git a/runtime/gc/accounting/mod_union_table_test.cc b/runtime/gc/accounting/mod_union_table_test.cc index 3f38f5069e..f1f10d2adf 100644 --- a/runtime/gc/accounting/mod_union_table_test.cc +++ b/runtime/gc/accounting/mod_union_table_test.cc @@ -100,7 +100,7 @@ class CollectVisitedVisitor : public MarkObjectVisitor { public: explicit CollectVisitedVisitor(std::set<mirror::Object*>* out) : out_(out) {} void MarkHeapReference(mirror::HeapReference<mirror::Object>* ref, - bool do_atomic_update ATTRIBUTE_UNUSED) override + [[maybe_unused]] bool do_atomic_update) override REQUIRES_SHARED(Locks::mutator_lock_) { DCHECK(ref != nullptr); MarkObject(ref->AsMirrorPtr()); diff --git a/runtime/gc/accounting/remembered_set.cc b/runtime/gc/accounting/remembered_set.cc index fba62c3d67..e4ee3052bf 100644 --- a/runtime/gc/accounting/remembered_set.cc +++ b/runtime/gc/accounting/remembered_set.cc @@ -42,7 +42,7 @@ class RememberedSetCardVisitor { explicit RememberedSetCardVisitor(RememberedSet::CardSet* const dirty_cards) : dirty_cards_(dirty_cards) {} - void operator()(uint8_t* card, uint8_t expected_value, uint8_t new_value ATTRIBUTE_UNUSED) const { + void operator()(uint8_t* card, uint8_t expected_value, [[maybe_unused]] uint8_t new_value) const { if (expected_value == CardTable::kCardDirty) { dirty_cards_->insert(card); } @@ -69,8 +69,7 @@ class RememberedSetReferenceVisitor { void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static 
ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { DCHECK(obj != nullptr); mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset); if (target_space_->HasAddress(ref_ptr->AsMirrorPtr())) { diff --git a/runtime/gc/accounting/space_bitmap_test.cc b/runtime/gc/accounting/space_bitmap_test.cc index 8fcf102406..72fcef078c 100644 --- a/runtime/gc/accounting/space_bitmap_test.cc +++ b/runtime/gc/accounting/space_bitmap_test.cc @@ -134,9 +134,7 @@ class SimpleCounter { public: explicit SimpleCounter(size_t* counter) : count_(counter) {} - void operator()(mirror::Object* obj ATTRIBUTE_UNUSED) const { - (*count_)++; - } + void operator()([[maybe_unused]] mirror::Object* obj) const { (*count_)++; } size_t* const count_; }; @@ -203,9 +201,7 @@ static void RunTestCount() { uintptr_t range_end, size_t manual_count) { size_t count = 0; - auto count_fn = [&count](mirror::Object* obj ATTRIBUTE_UNUSED) { - count++; - }; + auto count_fn = [&count]([[maybe_unused]] mirror::Object* obj) { count++; }; space_bitmap->VisitMarkedRange(range_begin, range_end, count_fn); EXPECT_EQ(count, manual_count); }; diff --git a/runtime/gc/allocation_listener.h b/runtime/gc/allocation_listener.h index 376b524862..f286c6cf6e 100644 --- a/runtime/gc/allocation_listener.h +++ b/runtime/gc/allocation_listener.h @@ -54,9 +54,9 @@ class AllocationListener { // PreObjectAlloc and the newly allocated object being visible to heap-walks. // // This can also be used to make any last-minute changes to the type or size of the allocation. - virtual void PreObjectAllocated(Thread* self ATTRIBUTE_UNUSED, - MutableHandle<mirror::Class> type ATTRIBUTE_UNUSED, - size_t* byte_count ATTRIBUTE_UNUSED) + virtual void PreObjectAllocated([[maybe_unused]] Thread* self, + [[maybe_unused]] MutableHandle<mirror::Class> type, + [[maybe_unused]] size_t* byte_count) REQUIRES(!Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_) {} // Fast check if we want to get the PreObjectAllocated callback, to avoid the expense of creating // handles. Defaults to false. 
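[Editorial aside — not part of the change.] The same attribute also applies to lambda parameters and to parameters of virtual default implementations, the two patterns that dominate the GC visitor and listener hunks nearby. A small hedged sketch with invented names, assuming only standard C++17:

    #include <cstddef>

    // A callback interface with a default no-op body: the parameters belong to
    // the contract but are deliberately ignored by this implementation.
    class Listener {
     public:
      virtual ~Listener() = default;
      virtual void OnAlloc([[maybe_unused]] void* obj, [[maybe_unused]] size_t bytes) {}
    };

    // Lambda parameters take the attribute the same way as ordinary parameters.
    int CountSlots(const int* begin, const int* end) {
      int count = 0;
      auto visit = [&count]([[maybe_unused]] const int* slot) { ++count; };
      for (const int* it = begin; it != end; ++it) {
        visit(it);
      }
      return count;
    }
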
diff --git a/runtime/gc/allocator/art-dlmalloc.cc b/runtime/gc/allocator/art-dlmalloc.cc index de0c85a407..6296acd3d6 100644 --- a/runtime/gc/allocator/art-dlmalloc.cc +++ b/runtime/gc/allocator/art-dlmalloc.cc @@ -83,8 +83,8 @@ extern "C" void DlmallocMadviseCallback(void* start, void* end, size_t used_byte } } -extern "C" void DlmallocBytesAllocatedCallback(void* start ATTRIBUTE_UNUSED, - void* end ATTRIBUTE_UNUSED, +extern "C" void DlmallocBytesAllocatedCallback([[maybe_unused]] void* start, + [[maybe_unused]] void* end, size_t used_bytes, void* arg) { if (used_bytes == 0) { @@ -94,8 +94,8 @@ extern "C" void DlmallocBytesAllocatedCallback(void* start ATTRIBUTE_UNUSED, *bytes_allocated += used_bytes + sizeof(size_t); } -extern "C" void DlmallocObjectsAllocatedCallback(void* start ATTRIBUTE_UNUSED, - void* end ATTRIBUTE_UNUSED, +extern "C" void DlmallocObjectsAllocatedCallback([[maybe_unused]] void* start, + [[maybe_unused]] void* end, size_t used_bytes, void* arg) { if (used_bytes == 0) { diff --git a/runtime/gc/allocator/rosalloc.cc b/runtime/gc/allocator/rosalloc.cc index 320440d3e3..656e29d081 100644 --- a/runtime/gc/allocator/rosalloc.cc +++ b/runtime/gc/allocator/rosalloc.cc @@ -1720,8 +1720,10 @@ void RosAlloc::Initialize() { DCHECK_EQ(kMaxRegularBracketSize, bracketSizes[kNumRegularSizeBrackets - 1]); } -void RosAlloc::BytesAllocatedCallback(void* start ATTRIBUTE_UNUSED, void* end ATTRIBUTE_UNUSED, - size_t used_bytes, void* arg) { +void RosAlloc::BytesAllocatedCallback([[maybe_unused]] void* start, + [[maybe_unused]] void* end, + size_t used_bytes, + void* arg) { if (used_bytes == 0) { return; } @@ -1729,8 +1731,10 @@ void RosAlloc::BytesAllocatedCallback(void* start ATTRIBUTE_UNUSED, void* end AT *bytes_allocated += used_bytes; } -void RosAlloc::ObjectsAllocatedCallback(void* start ATTRIBUTE_UNUSED, void* end ATTRIBUTE_UNUSED, - size_t used_bytes, void* arg) { +void RosAlloc::ObjectsAllocatedCallback([[maybe_unused]] void* start, + [[maybe_unused]] void* end, + size_t used_bytes, + void* arg) { if (used_bytes == 0) { return; } diff --git a/runtime/gc/allocator/rosalloc.h b/runtime/gc/allocator/rosalloc.h index 9a09c88927..bb2f426aef 100644 --- a/runtime/gc/allocator/rosalloc.h +++ b/runtime/gc/allocator/rosalloc.h @@ -303,7 +303,7 @@ class RosAlloc { // The number of slots in the list. This is used to make it fast to check if a free list is all // free without traversing the whole free list. uint32_t size_; - uint32_t padding_ ATTRIBUTE_UNUSED; + [[maybe_unused]] uint32_t padding_; friend class RosAlloc; }; @@ -354,7 +354,7 @@ class RosAlloc { uint8_t is_thread_local_; // True if this run is used as a thread-local run. bool to_be_bulk_freed_; // Used within BulkFree() to flag a run that's involved with // a bulk free. - uint32_t padding_ ATTRIBUTE_UNUSED; + [[maybe_unused]] uint32_t padding_; // Use a tailless free list for free_list_ so that the alloc fast path does not manage the tail. 
SlotFreeList<false> free_list_; SlotFreeList<true> bulk_free_list_; diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc index 1f123aaff5..3e958719fe 100644 --- a/runtime/gc/collector/concurrent_copying.cc +++ b/runtime/gc/collector/concurrent_copying.cc @@ -300,7 +300,7 @@ class ConcurrentCopying::ActivateReadBarrierEntrypointsCallback : public Closure explicit ActivateReadBarrierEntrypointsCallback(ConcurrentCopying* concurrent_copying) : concurrent_copying_(concurrent_copying) {} - void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES(Locks::thread_list_lock_) { + void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::thread_list_lock_) { // This needs to run under the thread_list_lock_ critical section in ThreadList::RunCheckpoint() // to avoid a race with ThreadList::Register(). CHECK(!concurrent_copying_->is_using_read_barrier_entrypoints_); @@ -509,7 +509,7 @@ class ConcurrentCopying::ThreadFlipVisitor : public Closure, public RootVisitor void VisitRoots(mirror::Object*** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) override + [[maybe_unused]] const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) { Thread* self = Thread::Current(); for (size_t i = 0; i < count; ++i) { @@ -526,7 +526,7 @@ class ConcurrentCopying::ThreadFlipVisitor : public Closure, public RootVisitor void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) override + [[maybe_unused]] const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) { Thread* self = Thread::Current(); for (size_t i = 0; i < count; ++i) { @@ -700,7 +700,7 @@ class ConcurrentCopying::VerifyNoMissingCardMarkVisitor { void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE { if (offset.Uint32Value() != mirror::Object::ClassOffset().Uint32Value()) { CheckReference(obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>( @@ -952,7 +952,7 @@ class ConcurrentCopying::CaptureRootsForMarkingVisitor : public RootVisitor { void VisitRoots(mirror::Object*** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) override + [[maybe_unused]] const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) { for (size_t i = 0; i < count; ++i) { mirror::Object** root = roots[i]; @@ -965,7 +965,7 @@ class ConcurrentCopying::CaptureRootsForMarkingVisitor : public RootVisitor { void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) override + [[maybe_unused]] const RootInfo& info) override REQUIRES_SHARED(Locks::mutator_lock_) { for (size_t i = 0; i < count; ++i) { mirror::CompressedReference<mirror::Object>* const root = roots[i]; @@ -1770,7 +1770,7 @@ class ConcurrentCopying::DisableMarkingCallback : public Closure { : concurrent_copying_(concurrent_copying) { } - void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES(Locks::thread_list_lock_) { + void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::thread_list_lock_) { // This needs to run under the thread_list_lock_ critical section in ThreadList::RunCheckpoint() // to avoid a race with ThreadList::Register(). 
CHECK(concurrent_copying_->is_marking_); @@ -1941,8 +1941,8 @@ class ConcurrentCopying::VerifyNoFromSpaceRefsVisitor : public SingleRootVisitor } } - void VisitRoot(mirror::Object* root, const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void VisitRoot(mirror::Object* root, [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) { DCHECK(root != nullptr); operator()(root); } @@ -1958,7 +1958,7 @@ class ConcurrentCopying::VerifyNoFromSpaceRefsFieldVisitor { void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE { mirror::Object* ref = obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset); @@ -2053,13 +2053,13 @@ class ConcurrentCopying::AssertToSpaceInvariantFieldVisitor { void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE { mirror::Object* ref = obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset); collector_->AssertToSpaceInvariant(obj.Ptr(), offset, ref); } - void operator()(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const + void operator()(ObjPtr<mirror::Class> klass, [[maybe_unused]] ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE { CHECK(klass->IsTypeOfReferenceClass()); } @@ -2417,7 +2417,7 @@ class ConcurrentCopying::DisableWeakRefAccessCallback : public Closure { : concurrent_copying_(concurrent_copying) { } - void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES(Locks::thread_list_lock_) { + void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::thread_list_lock_) { // This needs to run under the thread_list_lock_ critical section in ThreadList::RunCheckpoint() // to avoid a deadlock b/31500969. CHECK(concurrent_copying_->weak_ref_access_enabled_); @@ -3266,8 +3266,9 @@ inline void ConcurrentCopying::Process(mirror::Object* obj, MemberOffset offset) } // Process some roots. 
-inline void ConcurrentCopying::VisitRoots( - mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) { +inline void ConcurrentCopying::VisitRoots(mirror::Object*** roots, + size_t count, + [[maybe_unused]] const RootInfo& info) { Thread* const self = Thread::Current(); for (size_t i = 0; i < count; ++i) { mirror::Object** root = roots[i]; @@ -3308,9 +3309,9 @@ inline void ConcurrentCopying::MarkRoot(Thread* const self, } } -inline void ConcurrentCopying::VisitRoots( - mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) { +inline void ConcurrentCopying::VisitRoots(mirror::CompressedReference<mirror::Object>** roots, + size_t count, + [[maybe_unused]] const RootInfo& info) { Thread* const self = Thread::Current(); for (size_t i = 0; i < count; ++i) { mirror::CompressedReference<mirror::Object>* const root = roots[i]; diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc index f9a4e72a50..ea303ec58b 100644 --- a/runtime/gc/collector/mark_compact.cc +++ b/runtime/gc/collector/mark_compact.cc @@ -625,7 +625,7 @@ class MarkCompact::FlipCallback : public Closure { public: explicit FlipCallback(MarkCompact* collector) : collector_(collector) {} - void Run(Thread* thread ATTRIBUTE_UNUSED) override REQUIRES(Locks::mutator_lock_) { + void Run([[maybe_unused]] Thread* thread) override REQUIRES(Locks::mutator_lock_) { collector_->CompactionPause(); } @@ -851,7 +851,7 @@ class MarkCompact::ConcurrentCompactionGcTask : public SelfDeletingTask { explicit ConcurrentCompactionGcTask(MarkCompact* collector, size_t idx) : collector_(collector), index_(idx) {} - void Run(Thread* self ATTRIBUTE_UNUSED) override REQUIRES_SHARED(Locks::mutator_lock_) { + void Run([[maybe_unused]] Thread* self) override REQUIRES_SHARED(Locks::mutator_lock_) { if (collector_->CanCompactMovingSpaceWithMinorFault()) { collector_->ConcurrentCompaction<MarkCompact::kMinorFaultMode>(/*buf=*/nullptr); } else { @@ -1331,9 +1331,10 @@ class MarkCompact::RefsUpdateVisitor { DCHECK(!kCheckEnd || end != nullptr); } - void operator()(mirror::Object* old ATTRIBUTE_UNUSED, MemberOffset offset, bool /* is_static */) - const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) - REQUIRES_SHARED(Locks::heap_bitmap_lock_) { + void operator()([[maybe_unused]] mirror::Object* old, + MemberOffset offset, + [[maybe_unused]] bool is_static) const ALWAYS_INLINE + REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES_SHARED(Locks::heap_bitmap_lock_) { bool update = true; if (kCheckBegin || kCheckEnd) { uint8_t* ref = reinterpret_cast<uint8_t*>(obj_) + offset.Int32Value(); @@ -1348,12 +1349,11 @@ class MarkCompact::RefsUpdateVisitor { // VisitReferenes(). // TODO: Optimize reference updating using SIMD instructions. Object arrays // are perfect as all references are tightly packed. 
- void operator()(mirror::Object* old ATTRIBUTE_UNUSED, + void operator()([[maybe_unused]] mirror::Object* old, MemberOffset offset, - bool /*is_static*/, - bool /*is_obj_array*/) - const ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) - REQUIRES_SHARED(Locks::heap_bitmap_lock_) { + [[maybe_unused]] bool is_static, + [[maybe_unused]] bool is_obj_array) const ALWAYS_INLINE + REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES_SHARED(Locks::heap_bitmap_lock_) { collector_->UpdateRef(obj_, offset); } @@ -1455,51 +1455,38 @@ void MarkCompact::CompactPage(mirror::Object* obj, << " start_addr=" << static_cast<void*>(start_addr); }; obj = GetFromSpaceAddr(obj); - live_words_bitmap_->VisitLiveStrides(offset, - black_allocations_begin_, - kPageSize, - [&addr, - &last_stride, - &stride_count, - &last_stride_begin, - verify_obj_callback, - this] (uint32_t stride_begin, - size_t stride_size, - bool /*is_last*/) - REQUIRES_SHARED(Locks::mutator_lock_) { - const size_t stride_in_bytes = stride_size * kAlignment; - DCHECK_LE(stride_in_bytes, kPageSize); - last_stride_begin = stride_begin; - DCHECK(IsAligned<kAlignment>(addr)); - memcpy(addr, - from_space_begin_ + stride_begin * kAlignment, - stride_in_bytes); - if (kIsDebugBuild) { - uint8_t* space_begin = bump_pointer_space_->Begin(); - // We can interpret the first word of the stride as an - // obj only from second stride onwards, as the first - // stride's first-object may have started on previous - // page. The only exception is the first page of the - // moving space. - if (stride_count > 0 - || stride_begin * kAlignment < kPageSize) { - mirror::Object* o = - reinterpret_cast<mirror::Object*>(space_begin - + stride_begin - * kAlignment); - CHECK(live_words_bitmap_->Test(o)) << "ref=" << o; - CHECK(moving_space_bitmap_->Test(o)) - << "ref=" << o - << " bitmap: " - << moving_space_bitmap_->DumpMemAround(o); - VerifyObject(reinterpret_cast<mirror::Object*>(addr), - verify_obj_callback); - } - } - last_stride = addr; - addr += stride_in_bytes; - stride_count++; - }); + live_words_bitmap_->VisitLiveStrides( + offset, + black_allocations_begin_, + kPageSize, + [&addr, &last_stride, &stride_count, &last_stride_begin, verify_obj_callback, this]( + uint32_t stride_begin, size_t stride_size, [[maybe_unused]] bool is_last) + REQUIRES_SHARED(Locks::mutator_lock_) { + const size_t stride_in_bytes = stride_size * kAlignment; + DCHECK_LE(stride_in_bytes, kPageSize); + last_stride_begin = stride_begin; + DCHECK(IsAligned<kAlignment>(addr)); + memcpy(addr, from_space_begin_ + stride_begin * kAlignment, stride_in_bytes); + if (kIsDebugBuild) { + uint8_t* space_begin = bump_pointer_space_->Begin(); + // We can interpret the first word of the stride as an + // obj only from second stride onwards, as the first + // stride's first-object may have started on previous + // page. The only exception is the first page of the + // moving space. 
+ if (stride_count > 0 || stride_begin * kAlignment < kPageSize) { + mirror::Object* o = + reinterpret_cast<mirror::Object*>(space_begin + stride_begin * kAlignment); + CHECK(live_words_bitmap_->Test(o)) << "ref=" << o; + CHECK(moving_space_bitmap_->Test(o)) + << "ref=" << o << " bitmap: " << moving_space_bitmap_->DumpMemAround(o); + VerifyObject(reinterpret_cast<mirror::Object*>(addr), verify_obj_callback); + } + } + last_stride = addr; + addr += stride_in_bytes; + stride_count++; + }); DCHECK_LT(last_stride, start_addr + kPageSize); DCHECK_GT(stride_count, 0u); size_t obj_size = 0; @@ -3580,9 +3567,10 @@ class MarkCompact::ThreadRootsVisitor : public RootVisitor { Flush(); } - void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) - REQUIRES(Locks::heap_bitmap_lock_) { + void VisitRoots(mirror::Object*** roots, + size_t count, + [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { for (size_t i = 0; i < count; i++) { mirror::Object* obj = *roots[i]; if (mark_compact_->MarkObjectNonNullNoPush</*kParallel*/true>(obj)) { @@ -3593,9 +3581,8 @@ class MarkCompact::ThreadRootsVisitor : public RootVisitor { void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) - REQUIRES(Locks::heap_bitmap_lock_) { + [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { for (size_t i = 0; i < count; i++) { mirror::Object* obj = roots[i]->AsMirrorPtr(); if (mark_compact_->MarkObjectNonNullNoPush</*kParallel*/true>(obj)) { @@ -3762,9 +3749,7 @@ class MarkCompact::CardModifiedVisitor { accounting::CardTable* const card_table) : visitor_(mark_compact), bitmap_(bitmap), card_table_(card_table) {} - void operator()(uint8_t* card, - uint8_t expected_value, - uint8_t new_value ATTRIBUTE_UNUSED) const { + void operator()(uint8_t* card, uint8_t expected_value, [[maybe_unused]] uint8_t new_value) const { if (expected_value == accounting::CardTable::kCardDirty) { uintptr_t start = reinterpret_cast<uintptr_t>(card_table_->AddrFromCard(card)); bitmap_->VisitMarkedRange(start, start + accounting::CardTable::kCardSize, visitor_); @@ -3917,9 +3902,8 @@ class MarkCompact::RefFieldsVisitor { ALWAYS_INLINE void operator()(mirror::Object* obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const - REQUIRES(Locks::heap_bitmap_lock_) - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const + REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_) { if (kCheckLocks) { Locks::mutator_lock_->AssertSharedHeld(Thread::Current()); Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current()); @@ -4096,7 +4080,7 @@ mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) { } void MarkCompact::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj, - bool do_atomic_update ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool do_atomic_update) { MarkObject(obj->AsMirrorPtr(), nullptr, MemberOffset(0)); } @@ -4166,7 +4150,7 @@ mirror::Object* MarkCompact::IsMarked(mirror::Object* obj) { } bool MarkCompact::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj, - bool do_atomic_update ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool do_atomic_update) { mirror::Object* ref = obj->AsMirrorPtr(); if (ref == nullptr) { return true; diff 
--git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc index 4fefe6557c..52093541cd 100644 --- a/runtime/gc/collector/mark_sweep.cc +++ b/runtime/gc/collector/mark_sweep.cc @@ -416,7 +416,7 @@ inline void MarkSweep::MarkObjectNonNullParallel(mirror::Object* obj) { } bool MarkSweep::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref, - bool do_atomic_update ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool do_atomic_update) { mirror::Object* obj = ref->AsMirrorPtr(); if (obj == nullptr) { return true; @@ -558,7 +558,7 @@ inline bool MarkSweep::MarkObjectParallel(mirror::Object* obj) { } void MarkSweep::MarkHeapReference(mirror::HeapReference<mirror::Object>* ref, - bool do_atomic_update ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool do_atomic_update) { MarkObject(ref->AsMirrorPtr(), nullptr, MemberOffset(0)); } @@ -588,7 +588,7 @@ class MarkSweep::VerifyRootMarkedVisitor : public SingleRootVisitor { void MarkSweep::VisitRoots(mirror::Object*** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) { + [[maybe_unused]] const RootInfo& info) { for (size_t i = 0; i < count; ++i) { MarkObjectNonNull(*roots[i]); } @@ -596,7 +596,7 @@ void MarkSweep::VisitRoots(mirror::Object*** roots, void MarkSweep::VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) { + [[maybe_unused]] const RootInfo& info) { for (size_t i = 0; i < count; ++i) { MarkObjectNonNull(roots[i]->AsMirrorPtr()); } @@ -698,8 +698,8 @@ class MarkSweep::MarkStackTask : public Task { : chunk_task_(chunk_task), mark_sweep_(mark_sweep) {} ALWAYS_INLINE void operator()(mirror::Object* obj, - MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const + MemberOffset offset, + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { Mark(obj->GetFieldObject<mirror::Object>(offset)); } @@ -793,8 +793,7 @@ class MarkSweep::MarkStackTask : public Task { } // Scans all of the objects - void Run(Thread* self ATTRIBUTE_UNUSED) override - REQUIRES(Locks::heap_bitmap_lock_) + void Run([[maybe_unused]] Thread* self) override REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_) { ScanObjectParallelVisitor visitor(this); // TODO: Tune this. 
@@ -1142,9 +1141,10 @@ class MarkSweep::CheckpointMarkThreadRoots : public Closure, public RootVisitor revoke_ros_alloc_thread_local_buffers_at_checkpoint) { } - void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) - REQUIRES(Locks::heap_bitmap_lock_) { + void VisitRoots(mirror::Object*** roots, + size_t count, + [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { for (size_t i = 0; i < count; ++i) { mark_sweep_->MarkObjectNonNullParallel(*roots[i]); } @@ -1152,9 +1152,8 @@ class MarkSweep::CheckpointMarkThreadRoots : public Closure, public RootVisitor void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) - REQUIRES(Locks::heap_bitmap_lock_) { + [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { for (size_t i = 0; i < count; ++i) { mark_sweep_->MarkObjectNonNullParallel(roots[i]->AsMirrorPtr()); } @@ -1352,9 +1351,8 @@ class MarkVisitor { ALWAYS_INLINE void operator()(mirror::Object* obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const - REQUIRES(Locks::heap_bitmap_lock_) - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const + REQUIRES(Locks::heap_bitmap_lock_) REQUIRES_SHARED(Locks::mutator_lock_) { if (kCheckLocks) { Locks::mutator_lock_->AssertSharedHeld(Thread::Current()); Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current()); diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc index acd4807a4f..a7e2b59184 100644 --- a/runtime/gc/collector/semi_space.cc +++ b/runtime/gc/collector/semi_space.cc @@ -467,12 +467,13 @@ mirror::Object* SemiSpace::MarkObject(mirror::Object* root) { } void SemiSpace::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr, - bool do_atomic_update ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool do_atomic_update) { MarkObject(obj_ptr); } -void SemiSpace::VisitRoots(mirror::Object*** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) { +void SemiSpace::VisitRoots(mirror::Object*** roots, + size_t count, + [[maybe_unused]] const RootInfo& info) { for (size_t i = 0; i < count; ++i) { auto* root = roots[i]; auto ref = StackReference<mirror::Object>::FromMirrorPtr(*root); @@ -485,8 +486,9 @@ void SemiSpace::VisitRoots(mirror::Object*** roots, size_t count, } } -void SemiSpace::VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, - const RootInfo& info ATTRIBUTE_UNUSED) { +void SemiSpace::VisitRoots(mirror::CompressedReference<mirror::Object>** roots, + size_t count, + [[maybe_unused]] const RootInfo& info) { for (size_t i = 0; i < count; ++i) { MarkObjectIfNotInToSpace(roots[i]); } @@ -610,7 +612,7 @@ mirror::Object* SemiSpace::IsMarked(mirror::Object* obj) { bool SemiSpace::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* object, // SemiSpace does the GC in a pause. No CAS needed. 
- bool do_atomic_update ATTRIBUTE_UNUSED) { + [[maybe_unused]] bool do_atomic_update) { mirror::Object* obj = object->AsMirrorPtr(); if (obj == nullptr) { return true; diff --git a/runtime/gc/collector/sticky_mark_sweep.cc b/runtime/gc/collector/sticky_mark_sweep.cc index d93bd89835..e1bd16c1fb 100644 --- a/runtime/gc/collector/sticky_mark_sweep.cc +++ b/runtime/gc/collector/sticky_mark_sweep.cc @@ -73,7 +73,7 @@ void StickyMarkSweep::MarkConcurrentRoots(VisitRootFlags flags) { static_cast<VisitRootFlags>(flags | kVisitRootFlagClassLoader)); } -void StickyMarkSweep::Sweep(bool swap_bitmaps ATTRIBUTE_UNUSED) { +void StickyMarkSweep::Sweep([[maybe_unused]] bool swap_bitmaps) { SweepArray(GetHeap()->GetLiveStack(), false); } diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h index 922b58870d..0f1a44f4a3 100644 --- a/runtime/gc/heap-inl.h +++ b/runtime/gc/heap-inl.h @@ -441,7 +441,7 @@ inline bool Heap::ShouldAllocLargeObject(ObjPtr<mirror::Class> c, size_t byte_co return byte_count >= large_object_threshold_ && (c->IsPrimitiveArray() || c->IsStringClass()); } -inline bool Heap::IsOutOfMemoryOnAllocation(AllocatorType allocator_type ATTRIBUTE_UNUSED, +inline bool Heap::IsOutOfMemoryOnAllocation([[maybe_unused]] AllocatorType allocator_type, size_t alloc_size, bool grow) { size_t old_target = target_footprint_.load(std::memory_order_relaxed); diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc index f27bddb361..381271fded 100644 --- a/runtime/gc/heap.cc +++ b/runtime/gc/heap.cc @@ -132,7 +132,7 @@ void EnableHeapSamplerCallback(void* enable_ptr, // Disable the heap sampler Callback function used by Perfetto. void DisableHeapSamplerCallback(void* disable_ptr, - const AHeapProfileDisableCallbackInfo* info_ptr ATTRIBUTE_UNUSED) { + [[maybe_unused]] const AHeapProfileDisableCallbackInfo* info_ptr) { HeapSampler* sampler_self = reinterpret_cast<HeapSampler*>(disable_ptr); sampler_self->DisableHeapSampler(); } @@ -2342,7 +2342,7 @@ class ZygoteCompactingCollector final : public collector::SemiSpace { } } - bool ShouldSweepSpace(space::ContinuousSpace* space ATTRIBUTE_UNUSED) const override { + bool ShouldSweepSpace([[maybe_unused]] space::ContinuousSpace* space) const override { // Don't sweep any spaces since we probably blasted the internal accounting of the free list // allocator. return false; @@ -2986,7 +2986,7 @@ class VerifyReferenceVisitor : public SingleRootVisitor { CHECK_EQ(self_, Thread::Current()); } - void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, ObjPtr<mirror::Reference> ref) const + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) { if (verify_referent_) { VerifyReference(ref.Ptr(), ref->GetReferent(), mirror::Reference::ReferentOffset()); @@ -2995,8 +2995,7 @@ class VerifyReferenceVisitor : public SingleRootVisitor { void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { VerifyReference(obj.Ptr(), obj->GetFieldObject<mirror::Object>(offset), offset); } @@ -3251,9 +3250,9 @@ class VerifyReferenceCardVisitor { } // There is no card marks for native roots on a class. 
- void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const {} - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} // TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for // annotalysis on visitors. @@ -3502,7 +3501,7 @@ void Heap::PreGcVerification(collector::GarbageCollector* gc) { } } -void Heap::PrePauseRosAllocVerification(collector::GarbageCollector* gc ATTRIBUTE_UNUSED) { +void Heap::PrePauseRosAllocVerification([[maybe_unused]] collector::GarbageCollector* gc) { // TODO: Add a new runtime option for this? if (verify_pre_gc_rosalloc_) { RosAllocVerification(current_gc_iteration_.GetTimings(), "PreGcRosAllocVerification"); diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc index 56efcabb7c..f4af50f07e 100644 --- a/runtime/gc/space/image_space.cc +++ b/runtime/gc/space/image_space.cc @@ -332,7 +332,7 @@ class ImageSpace::PatchObjectVisitor final { } template <typename T> - T* operator()(T* ptr, void** dest_addr ATTRIBUTE_UNUSED) const { + T* operator()(T* ptr, [[maybe_unused]] void** dest_addr) const { return (ptr != nullptr) ? native_visitor_(ptr) : nullptr; } @@ -373,9 +373,9 @@ class ImageSpace::PatchObjectVisitor final { this->operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false); } // Ignore class native roots; not called from VisitReferences() for kVisitNativeRoots == false. - void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const {} - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} template <typename T> void VisitNativeDexCacheArray(mirror::NativeArray<T>* array) REQUIRES_SHARED(Locks::mutator_lock_) { @@ -516,8 +516,8 @@ class ImageSpace::RemapInternedStringsVisitor { // Visitor for VisitReferences(). ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> object, MemberOffset field_offset, - bool is_static ATTRIBUTE_UNUSED) - const REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] bool is_static) const + REQUIRES_SHARED(Locks::mutator_lock_) { ObjPtr<mirror::Object> old_value = object->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(field_offset); if (old_value != nullptr && @@ -538,9 +538,9 @@ class ImageSpace::RemapInternedStringsVisitor { this->operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false); } // Ignore class native roots; not called from VisitReferences() for kVisitNativeRoots == false. - void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const {} - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} private: mirror::Class* GetStringClass() REQUIRES_SHARED(Locks::mutator_lock_) { @@ -1179,15 +1179,14 @@ class ImageSpace::Loader { // Fix up separately since we also need to fix up method entrypoints. 
ALWAYS_INLINE void VisitRootIfNonNull( - mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} - ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const {} + ALWAYS_INLINE void VisitRoot( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {} ALWAYS_INLINE void operator()(ObjPtr<mirror::Object> obj, MemberOffset offset, - bool is_static ATTRIBUTE_UNUSED) const - NO_THREAD_SAFETY_ANALYSIS { + [[maybe_unused]] bool is_static) const NO_THREAD_SAFETY_ANALYSIS { // Space is not yet added to the heap, don't do a read barrier. mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>( offset); @@ -1898,7 +1897,7 @@ bool ImageSpace::BootImageLayout::CompileBootclasspathElements( // TODO: Rewrite ProfileCompilationInfo to provide a better interface and // to store the dex locations in uncompressed section of the file. auto collect_fn = [&dex_locations](const std::string& dex_location, - uint32_t checksum ATTRIBUTE_UNUSED) { + [[maybe_unused]] uint32_t checksum) { dex_locations.insert(dex_location); // Just collect locations. return false; // Do not read the profile data. }; @@ -2188,8 +2187,8 @@ bool ImageSpace::BootImageLayout::LoadFromSystem(InstructionSet image_isa, bool allow_in_memory_compilation, /*out*/ std::string* error_msg) { auto filename_fn = [image_isa](const std::string& location, - /*out*/std::string* filename, - /*out*/std::string* err_msg ATTRIBUTE_UNUSED) { + /*out*/ std::string* filename, + [[maybe_unused]] /*out*/ std::string* err_msg) { *filename = GetSystemImageFilename(location.c_str(), image_isa); return true; }; diff --git a/runtime/gc/space/large_object_space.cc b/runtime/gc/space/large_object_space.cc index f1df45f19a..80ed9b356d 100644 --- a/runtime/gc/space/large_object_space.cc +++ b/runtime/gc/space/large_object_space.cc @@ -585,7 +585,7 @@ void FreeListSpace::Dump(std::ostream& os) const { } } -bool FreeListSpace::IsZygoteLargeObject(Thread* self ATTRIBUTE_UNUSED, mirror::Object* obj) const { +bool FreeListSpace::IsZygoteLargeObject([[maybe_unused]] Thread* self, mirror::Object* obj) const { const AllocationInfo* info = GetAllocationInfoForAddress(reinterpret_cast<uintptr_t>(obj)); DCHECK(info != nullptr); return info->IsZygoteObject(); diff --git a/runtime/gc/space/memory_tool_malloc_space.h b/runtime/gc/space/memory_tool_malloc_space.h index 33bddfa4c8..ce72b5b435 100644 --- a/runtime/gc/space/memory_tool_malloc_space.h +++ b/runtime/gc/space/memory_tool_malloc_space.h @@ -48,7 +48,7 @@ class MemoryToolMallocSpace final : public BaseMallocSpaceType { size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) override REQUIRES_SHARED(Locks::mutator_lock_); - void RegisterRecentFree(mirror::Object* ptr ATTRIBUTE_UNUSED) override {} + void RegisterRecentFree([[maybe_unused]] mirror::Object* ptr) override {} size_t MaxBytesBulkAllocatedFor(size_t num_bytes) override; diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h index 1026f42c27..4376137cb4 100644 --- a/runtime/gc/space/region_space-inl.h +++ b/runtime/gc/space/region_space-inl.h @@ -26,7 +26,7 @@ namespace art { namespace gc { namespace space { -inline mirror::Object* RegionSpace::Alloc(Thread* self ATTRIBUTE_UNUSED, +inline mirror::Object* RegionSpace::Alloc([[maybe_unused]] Thread* self, size_t num_bytes, /* out */ size_t* bytes_allocated, /* out */ size_t* 
usable_size, diff --git a/runtime/gc/space/zygote_space.cc b/runtime/gc/space/zygote_space.cc index c5e3a7046f..f40061ff88 100644 --- a/runtime/gc/space/zygote_space.cc +++ b/runtime/gc/space/zygote_space.cc @@ -34,9 +34,7 @@ class CountObjectsAllocated { explicit CountObjectsAllocated(size_t* objects_allocated) : objects_allocated_(objects_allocated) {} - void operator()(mirror::Object* obj ATTRIBUTE_UNUSED) const { - ++*objects_allocated_; - } + void operator()([[maybe_unused]] mirror::Object* obj) const { ++*objects_allocated_; } private: size_t* const objects_allocated_; diff --git a/runtime/gc/system_weak.h b/runtime/gc/system_weak.h index 77b9548211..57d593c756 100644 --- a/runtime/gc/system_weak.h +++ b/runtime/gc/system_weak.h @@ -62,7 +62,7 @@ class SystemWeakHolder : public AbstractSystemWeakHolder { allow_new_system_weak_ = false; } - void Broadcast(bool broadcast_for_checkpoint ATTRIBUTE_UNUSED) override + void Broadcast([[maybe_unused]] bool broadcast_for_checkpoint) override REQUIRES(!allow_disallow_lock_) { MutexLock mu(Thread::Current(), allow_disallow_lock_); new_weak_condition_.Broadcast(Thread::Current()); diff --git a/runtime/gc/task_processor_test.cc b/runtime/gc/task_processor_test.cc index 7cb678ba7a..a3666e0a25 100644 --- a/runtime/gc/task_processor_test.cc +++ b/runtime/gc/task_processor_test.cc @@ -105,7 +105,7 @@ class TestOrderTask : public HeapTask { TestOrderTask(uint64_t expected_time, size_t expected_counter, size_t* counter) : HeapTask(expected_time), expected_counter_(expected_counter), counter_(counter) { } - void Run(Thread* thread ATTRIBUTE_UNUSED) override { + void Run([[maybe_unused]] Thread* thread) override { ASSERT_EQ(*counter_, expected_counter_); ++*counter_; } diff --git a/runtime/gc/verification.cc b/runtime/gc/verification.cc index 195986f04d..ad04860c57 100644 --- a/runtime/gc/verification.cc +++ b/runtime/gc/verification.cc @@ -133,7 +133,7 @@ class Verification::BFSFindReachable { public: explicit BFSFindReachable(ObjectSet* visited) : visited_(visited) {} - void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const + void operator()(mirror::Object* obj, MemberOffset offset, [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) { ArtField* field = obj->FindFieldByOffset(offset); Visit(obj->GetFieldObject<mirror::Object>(offset), diff --git a/runtime/handle_scope.h b/runtime/handle_scope.h index a43e889111..0c50312203 100644 --- a/runtime/handle_scope.h +++ b/runtime/handle_scope.h @@ -122,9 +122,7 @@ class PACKED(4) HandleScope : public BaseHandleScope { ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const; // Offset of link within HandleScope, used by generated code. - static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) { - return 0; - } + static constexpr size_t LinkOffset([[maybe_unused]] PointerSize pointer_size) { return 0; } // Offset of length within handle scope, used by generated code. 
static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) { diff --git a/runtime/hprof/hprof.cc b/runtime/hprof/hprof.cc index 5e4a5f3ae3..ea64cc13ce 100644 --- a/runtime/hprof/hprof.cc +++ b/runtime/hprof/hprof.cc @@ -237,7 +237,7 @@ class EndianOutput { HandleU4List(values, count); length_ += count * sizeof(uint32_t); } - virtual void UpdateU4(size_t offset, uint32_t new_value ATTRIBUTE_UNUSED) { + virtual void UpdateU4(size_t offset, [[maybe_unused]] uint32_t new_value) { DCHECK_LE(offset, length_ - 4); } void AddU8List(const uint64_t* values, size_t count) { @@ -271,21 +271,16 @@ class EndianOutput { } protected: - virtual void HandleU1List(const uint8_t* values ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED) { - } - virtual void HandleU1AsU2List(const uint8_t* values ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED) { - } - virtual void HandleU2List(const uint16_t* values ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED) { - } - virtual void HandleU4List(const uint32_t* values ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED) { - } - virtual void HandleU8List(const uint64_t* values ATTRIBUTE_UNUSED, - size_t count ATTRIBUTE_UNUSED) { - } + virtual void HandleU1List([[maybe_unused]] const uint8_t* values, [[maybe_unused]] size_t count) { + } + virtual void HandleU1AsU2List([[maybe_unused]] const uint8_t* values, + [[maybe_unused]] size_t count) {} + virtual void HandleU2List([[maybe_unused]] const uint16_t* values, + [[maybe_unused]] size_t count) {} + virtual void HandleU4List([[maybe_unused]] const uint32_t* values, + [[maybe_unused]] size_t count) {} + virtual void HandleU8List([[maybe_unused]] const uint64_t* values, + [[maybe_unused]] size_t count) {} virtual void HandleEndRecord() { } @@ -382,7 +377,7 @@ class EndianOutputBuffered : public EndianOutput { buffer_.clear(); } - virtual void HandleFlush(const uint8_t* buffer ATTRIBUTE_UNUSED, size_t length ATTRIBUTE_UNUSED) { + virtual void HandleFlush([[maybe_unused]] const uint8_t* buffer, [[maybe_unused]] size_t length) { } std::vector<uint8_t> buffer_; @@ -743,7 +738,7 @@ class Hprof : public SingleRootVisitor { } } - bool DumpToDdmsBuffered(size_t overall_size ATTRIBUTE_UNUSED, size_t max_length ATTRIBUTE_UNUSED) + bool DumpToDdmsBuffered([[maybe_unused]] size_t overall_size, [[maybe_unused]] size_t max_length) REQUIRES(Locks::mutator_lock_) { LOG(FATAL) << "Unimplemented"; UNREACHABLE(); diff --git a/runtime/image.h b/runtime/image.h index 324cd3c6d7..5580e273c5 100644 --- a/runtime/image.h +++ b/runtime/image.h @@ -274,7 +274,7 @@ class PACKED(8) ImageHeader { kSectionCount, // Number of elements in enum. }; - static size_t NumberOfImageRoots(bool app_image ATTRIBUTE_UNUSED) { + static size_t NumberOfImageRoots([[maybe_unused]] bool app_image) { // At the moment, boot image and app image have the same number of roots, // though the meaning of the kSpecialRoots is different. 
return kImageRootsMax; diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc index 5ce2b10afc..ba0d63dde9 100644 --- a/runtime/instrumentation.cc +++ b/runtime/instrumentation.cc @@ -1073,7 +1073,7 @@ void Instrumentation::UpdateStubs() { } } -static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) { +static void ResetQuickAllocEntryPointsForThread(Thread* thread, [[maybe_unused]] void* arg) { thread->ResetQuickAllocEntryPointsForThread(); } diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h index 144ee09e39..7676080efe 100644 --- a/runtime/instrumentation.h +++ b/runtime/instrumentation.h @@ -149,8 +149,8 @@ struct InstrumentationListener { // Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack by // either return or exceptions. Normally instrumentation listeners should ensure that there are // shadow-frames by deoptimizing stacks. - virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED, - const ShadowFrame& frame ATTRIBUTE_UNUSED) + virtual void WatchedFramePop([[maybe_unused]] Thread* thread, + [[maybe_unused]] const ShadowFrame& frame) REQUIRES_SHARED(Locks::mutator_lock_) = 0; }; diff --git a/runtime/instrumentation_test.cc b/runtime/instrumentation_test.cc index fc052981de..1e98e57c2a 100644 --- a/runtime/instrumentation_test.cc +++ b/runtime/instrumentation_test.cc @@ -55,93 +55,93 @@ class TestInstrumentationListener final : public instrumentation::Instrumentatio virtual ~TestInstrumentationListener() {} - void MethodEntered(Thread* thread ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED) override + void MethodEntered([[maybe_unused]] Thread* thread, [[maybe_unused]] ArtMethod* method) override REQUIRES_SHARED(Locks::mutator_lock_) { received_method_enter_event = true; } - void MethodExited(Thread* thread ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED, - MutableHandle<mirror::Object>& return_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void MethodExited([[maybe_unused]] Thread* thread, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] instrumentation::OptionalFrame frame, + [[maybe_unused]] MutableHandle<mirror::Object>& return_value) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_method_exit_object_event = true; } - void MethodExited(Thread* thread ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED, - JValue& return_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void MethodExited([[maybe_unused]] Thread* thread, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] instrumentation::OptionalFrame frame, + [[maybe_unused]] JValue& return_value) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_method_exit_event = true; } - void MethodUnwind(Thread* thread ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void MethodUnwind([[maybe_unused]] Thread* thread, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_method_unwind_event = true; } - void DexPcMoved(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t new_dex_pc ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void 
DexPcMoved([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] uint32_t new_dex_pc) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_dex_pc_moved_event = true; } - void FieldRead(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - ArtField* field ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void FieldRead([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] ArtField* field) override REQUIRES_SHARED(Locks::mutator_lock_) { received_field_read_event = true; } - void FieldWritten(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - ArtField* field ATTRIBUTE_UNUSED, - Handle<mirror::Object> field_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void FieldWritten([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] ArtField* field, + [[maybe_unused]] Handle<mirror::Object> field_value) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_field_written_object_event = true; } - void FieldWritten(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - ArtField* field ATTRIBUTE_UNUSED, - const JValue& field_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void FieldWritten([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] ArtField* field, + [[maybe_unused]] const JValue& field_value) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_field_written_event = true; } - void ExceptionThrown(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Throwable> exception_object ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void ExceptionThrown([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Throwable> exception_object) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_exception_thrown_event = true; } - void ExceptionHandled(Thread* self ATTRIBUTE_UNUSED, - Handle<mirror::Throwable> throwable ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void ExceptionHandled([[maybe_unused]] Thread* self, + [[maybe_unused]] Handle<mirror::Throwable> throwable) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_exception_handled_event = true; } - void Branch(Thread* thread ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - int32_t dex_pc_offset ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void Branch([[maybe_unused]] Thread* thread, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] int32_t dex_pc_offset) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_branch_event = true; } - void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED, const ShadowFrame& frame ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { 
+ void WatchedFramePop([[maybe_unused]] Thread* thread, + [[maybe_unused]] const ShadowFrame& frame) override + REQUIRES_SHARED(Locks::mutator_lock_) { received_watched_frame_pop = true; } diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc index 32ed4300b7..8d3d2d6a3d 100644 --- a/runtime/interpreter/unstarted_runtime.cc +++ b/runtime/interpreter/unstarted_runtime.cc @@ -813,8 +813,10 @@ static void PrimitiveArrayCopy(Thread* self, } } -void UnstartedRuntime::UnstartedSystemArraycopy( - Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) { +void UnstartedRuntime::UnstartedSystemArraycopy(Thread* self, + ShadowFrame* shadow_frame, + [[maybe_unused]] JValue* result, + size_t arg_offset) { // Special case array copying without initializing System. jint src_pos = shadow_frame->GetVReg(arg_offset + 1); jint dst_pos = shadow_frame->GetVReg(arg_offset + 3); @@ -930,9 +932,10 @@ void UnstartedRuntime::UnstartedSystemArraycopyInt( UnstartedRuntime::UnstartedSystemArraycopy(self, shadow_frame, result, arg_offset); } -void UnstartedRuntime::UnstartedSystemGetSecurityManager( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame ATTRIBUTE_UNUSED, - JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedSystemGetSecurityManager([[maybe_unused]] Thread* self, + [[maybe_unused]] ShadowFrame* shadow_frame, + JValue* result, + [[maybe_unused]] size_t arg_offset) { result->SetL(nullptr); } @@ -1089,8 +1092,10 @@ static ObjPtr<mirror::Object> CreateInstanceOf(Thread* self, const char* class_d return nullptr; } -void UnstartedRuntime::UnstartedThreadLocalGet( - Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedThreadLocalGet(Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + [[maybe_unused]] size_t arg_offset) { if (CheckCallers(shadow_frame, { "jdk.internal.math.FloatingDecimal$BinaryToASCIIBuffer " "jdk.internal.math.FloatingDecimal.getBinaryToASCIIBuffer()" })) { result->SetL(CreateInstanceOf(self, "Ljdk/internal/math/FloatingDecimal$BinaryToASCIIBuffer;")); @@ -1101,8 +1106,10 @@ void UnstartedRuntime::UnstartedThreadLocalGet( } } -void UnstartedRuntime::UnstartedThreadCurrentThread( - Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedThreadCurrentThread(Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + [[maybe_unused]] size_t arg_offset) { if (CheckCallers(shadow_frame, { "void java.lang.Thread.<init>(java.lang.ThreadGroup, java.lang.Runnable, " "java.lang.String, long, java.security.AccessControlContext, boolean)", @@ -1131,8 +1138,10 @@ void UnstartedRuntime::UnstartedThreadCurrentThread( } } -void UnstartedRuntime::UnstartedThreadGetNativeState( - Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedThreadGetNativeState(Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + [[maybe_unused]] size_t arg_offset) { if (CheckCallers(shadow_frame, { "java.lang.Thread$State java.lang.Thread.getState()", "java.lang.ThreadGroup java.lang.Thread.getThreadGroup()", @@ -1154,45 +1163,61 @@ void UnstartedRuntime::UnstartedThreadGetNativeState( } } -void UnstartedRuntime::UnstartedMathCeil( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void 
UnstartedRuntime::UnstartedMathCeil([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { result->SetD(ceil(shadow_frame->GetVRegDouble(arg_offset))); } -void UnstartedRuntime::UnstartedMathFloor( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMathFloor([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { result->SetD(floor(shadow_frame->GetVRegDouble(arg_offset))); } -void UnstartedRuntime::UnstartedMathSin( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMathSin([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { result->SetD(sin(shadow_frame->GetVRegDouble(arg_offset))); } -void UnstartedRuntime::UnstartedMathCos( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMathCos([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { result->SetD(cos(shadow_frame->GetVRegDouble(arg_offset))); } -void UnstartedRuntime::UnstartedMathPow( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMathPow([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { result->SetD(pow(shadow_frame->GetVRegDouble(arg_offset), shadow_frame->GetVRegDouble(arg_offset + 2))); } -void UnstartedRuntime::UnstartedMathTan( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMathTan([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { result->SetD(tan(shadow_frame->GetVRegDouble(arg_offset))); } -void UnstartedRuntime::UnstartedObjectHashCode( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedObjectHashCode([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset); result->SetI(obj->IdentityHashCode()); } -void UnstartedRuntime::UnstartedDoubleDoubleToRawLongBits( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedDoubleDoubleToRawLongBits([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { double in = shadow_frame->GetVRegDouble(arg_offset); result->SetJ(bit_cast<int64_t, double>(in)); } @@ -1240,23 +1265,31 @@ static void UnstartedMemoryPeek( UNREACHABLE(); } -void UnstartedRuntime::UnstartedMemoryPeekByte( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMemoryPeekByte([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { UnstartedMemoryPeek(Primitive::kPrimByte, shadow_frame, result, arg_offset); } -void UnstartedRuntime::UnstartedMemoryPeekShort( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMemoryPeekShort([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { 
UnstartedMemoryPeek(Primitive::kPrimShort, shadow_frame, result, arg_offset); } -void UnstartedRuntime::UnstartedMemoryPeekInt( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMemoryPeekInt([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { UnstartedMemoryPeek(Primitive::kPrimInt, shadow_frame, result, arg_offset); } -void UnstartedRuntime::UnstartedMemoryPeekLong( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) { +void UnstartedRuntime::UnstartedMemoryPeekLong([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) { UnstartedMemoryPeek(Primitive::kPrimLong, shadow_frame, result, arg_offset); } @@ -1309,14 +1342,18 @@ static void UnstartedMemoryPeekArray( UNREACHABLE(); } -void UnstartedRuntime::UnstartedMemoryPeekByteArray( - Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) { +void UnstartedRuntime::UnstartedMemoryPeekByteArray(Thread* self, + ShadowFrame* shadow_frame, + [[maybe_unused]] JValue* result, + size_t arg_offset) { UnstartedMemoryPeekArray(Primitive::kPrimByte, self, shadow_frame, arg_offset); } // This allows reading the new style of String objects during compilation. -void UnstartedRuntime::UnstartedStringGetCharsNoCheck( - Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) { +void UnstartedRuntime::UnstartedStringGetCharsNoCheck(Thread* self, + ShadowFrame* shadow_frame, + [[maybe_unused]] JValue* result, + size_t arg_offset) { jint start = shadow_frame->GetVReg(arg_offset + 1); jint end = shadow_frame->GetVReg(arg_offset + 2); jint index = shadow_frame->GetVReg(arg_offset + 4); @@ -1477,8 +1514,10 @@ void UnstartedRuntime::UnstartedReferenceRefersTo( // where we can predict the behavior (somewhat). // Note: this is required (instead of lazy initialization) as these classes are used in the static // initialization of other classes, so will *use* the value. -void UnstartedRuntime::UnstartedRuntimeAvailableProcessors( - Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedRuntimeAvailableProcessors(Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + [[maybe_unused]] size_t arg_offset) { if (CheckCallers(shadow_frame, { "void java.util.concurrent.SynchronousQueue.<clinit>()" })) { // SynchronousQueue really only separates between single- and multiprocessor case. Return // 8 as a conservative upper approximation. @@ -1628,8 +1667,10 @@ void UnstartedRuntime::UnstartedJdkUnsafeGetObjectVolatile( result->SetL(value); } -void UnstartedRuntime::UnstartedJdkUnsafePutObjectVolatile( - Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) +void UnstartedRuntime::UnstartedJdkUnsafePutObjectVolatile(Thread* self, + ShadowFrame* shadow_frame, + [[maybe_unused]] JValue* result, + size_t arg_offset) REQUIRES_SHARED(Locks::mutator_lock_) { // Argument 0 is the Unsafe instance, skip. 
mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset + 1); @@ -1650,8 +1691,10 @@ void UnstartedRuntime::UnstartedJdkUnsafePutObjectVolatile( } } -void UnstartedRuntime::UnstartedJdkUnsafePutOrderedObject( - Thread* self, ShadowFrame* shadow_frame, JValue* result ATTRIBUTE_UNUSED, size_t arg_offset) +void UnstartedRuntime::UnstartedJdkUnsafePutOrderedObject(Thread* self, + ShadowFrame* shadow_frame, + [[maybe_unused]] JValue* result, + size_t arg_offset) REQUIRES_SHARED(Locks::mutator_lock_) { // Argument 0 is the Unsafe instance, skip. mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset + 1); @@ -1799,8 +1842,10 @@ void UnstartedRuntime::UnstartedMethodInvoke( } } -void UnstartedRuntime::UnstartedSystemIdentityHashCode( - Thread* self ATTRIBUTE_UNUSED, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset) +void UnstartedRuntime::UnstartedSystemIdentityHashCode([[maybe_unused]] Thread* self, + ShadowFrame* shadow_frame, + JValue* result, + size_t arg_offset) REQUIRES_SHARED(Locks::mutator_lock_) { mirror::Object* obj = shadow_frame->GetVRegReference(arg_offset); result->SetI((obj != nullptr) ? obj->IdentityHashCode() : 0); @@ -1810,9 +1855,11 @@ void UnstartedRuntime::UnstartedSystemIdentityHashCode( // java.lang.invoke.VarHandle clinit. The clinit determines sets of // available VarHandle accessors and these differ based on machine // word size. -void UnstartedRuntime::UnstartedJNIVMRuntimeIs64Bit( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { +void UnstartedRuntime::UnstartedJNIVMRuntimeIs64Bit([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); jboolean is64bit = (pointer_size == PointerSize::k64) ? 
JNI_TRUE : JNI_FALSE; result->SetZ(is64bit); @@ -1820,8 +1867,8 @@ void UnstartedRuntime::UnstartedJNIVMRuntimeIs64Bit( void UnstartedRuntime::UnstartedJNIVMRuntimeNewUnpaddedArray( Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, uint32_t* args, JValue* result) { int32_t length = args[1]; @@ -1841,14 +1888,19 @@ void UnstartedRuntime::UnstartedJNIVMRuntimeNewUnpaddedArray( } void UnstartedRuntime::UnstartedJNIVMStackGetCallingClassLoader( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { + [[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { result->SetL(nullptr); } -void UnstartedRuntime::UnstartedJNIVMStackGetStackClass2( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { +void UnstartedRuntime::UnstartedJNIVMStackGetStackClass2(Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { NthCallerVisitor visitor(self, 3); visitor.WalkStack(); if (visitor.caller != nullptr) { @@ -1856,75 +1908,91 @@ void UnstartedRuntime::UnstartedJNIVMStackGetStackClass2( } } -void UnstartedRuntime::UnstartedJNIMathLog( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIMathLog([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { JValue value; value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]); result->SetD(log(value.GetD())); } -void UnstartedRuntime::UnstartedJNIMathExp( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIMathExp([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { JValue value; value.SetJ((static_cast<uint64_t>(args[1]) << 32) | args[0]); result->SetD(exp(value.GetD())); } void UnstartedRuntime::UnstartedJNIAtomicLongVMSupportsCS8( - Thread* self ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args ATTRIBUTE_UNUSED, + [[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, JValue* result) { result->SetZ(QuasiAtomic::LongAtomicsUseMutexes(Runtime::Current()->GetInstructionSet()) ? 
0 : 1); } -void UnstartedRuntime::UnstartedJNIClassGetNameNative( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, - uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { +void UnstartedRuntime::UnstartedJNIClassGetNameNative(Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { StackHandleScope<1> hs(self); result->SetL(mirror::Class::ComputeName(hs.NewHandle(receiver->AsClass()))); } -void UnstartedRuntime::UnstartedJNIDoubleLongBitsToDouble( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIDoubleLongBitsToDouble([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { uint64_t long_input = args[0] | (static_cast<uint64_t>(args[1]) << 32); result->SetD(bit_cast<double>(long_input)); } -void UnstartedRuntime::UnstartedJNIFloatFloatToRawIntBits( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIFloatFloatToRawIntBits([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { result->SetI(args[0]); } -void UnstartedRuntime::UnstartedJNIFloatIntBitsToFloat( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIFloatIntBitsToFloat([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { result->SetI(args[0]); } -void UnstartedRuntime::UnstartedJNIObjectInternalClone( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, - uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { +void UnstartedRuntime::UnstartedJNIObjectInternalClone(Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { StackHandleScope<1> hs(self); Handle<mirror::Object> h_receiver = hs.NewHandle(receiver); result->SetL(mirror::Object::Clone(h_receiver, self)); } -void UnstartedRuntime::UnstartedJNIObjectNotifyAll( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, - uint32_t* args ATTRIBUTE_UNUSED, JValue* result ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedJNIObjectNotifyAll(Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + [[maybe_unused]] JValue* result) { receiver->NotifyAll(self); } void UnstartedRuntime::UnstartedJNIStringCompareTo(Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, + [[maybe_unused]] ArtMethod* method, mirror::Object* receiver, uint32_t* args, JValue* result) { @@ -1936,9 +2004,11 @@ void UnstartedRuntime::UnstartedJNIStringCompareTo(Thread* self, result->SetI(receiver->AsString()->CompareTo(rhs->AsString())); } -void UnstartedRuntime::UnstartedJNIStringFillBytesLatin1( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver, uint32_t* args, JValue* ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedJNIStringFillBytesLatin1(Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + 
uint32_t* args, + [[maybe_unused]] JValue*) { StackHandleScope<2> hs(self); Handle<mirror::String> h_receiver(hs.NewHandle( reinterpret_cast<mirror::String*>(receiver)->AsString())); @@ -1948,9 +2018,11 @@ void UnstartedRuntime::UnstartedJNIStringFillBytesLatin1( h_receiver->FillBytesLatin1(h_buffer, index); } -void UnstartedRuntime::UnstartedJNIStringFillBytesUTF16( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver, uint32_t* args, JValue* ATTRIBUTE_UNUSED) { +void UnstartedRuntime::UnstartedJNIStringFillBytesUTF16(Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + uint32_t* args, + [[maybe_unused]] JValue*) { StackHandleScope<2> hs(self); Handle<mirror::String> h_receiver(hs.NewHandle( reinterpret_cast<mirror::String*>(receiver)->AsString())); @@ -1960,24 +2032,30 @@ void UnstartedRuntime::UnstartedJNIStringFillBytesUTF16( h_receiver->FillBytesUTF16(h_buffer, index); } -void UnstartedRuntime::UnstartedJNIStringIntern( - Thread* self ATTRIBUTE_UNUSED, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver, - uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { +void UnstartedRuntime::UnstartedJNIStringIntern([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { result->SetL(receiver->AsString()->Intern()); } -void UnstartedRuntime::UnstartedJNIArrayCreateMultiArray( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIArrayCreateMultiArray(Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { StackHandleScope<2> hs(self); auto h_class(hs.NewHandle(reinterpret_cast<mirror::Class*>(args[0])->AsClass())); auto h_dimensions(hs.NewHandle(reinterpret_cast<mirror::IntArray*>(args[1])->AsIntArray())); result->SetL(mirror::Array::CreateMultiArray(self, h_class, h_dimensions)); } -void UnstartedRuntime::UnstartedJNIArrayCreateObjectArray( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args, JValue* result) { +void UnstartedRuntime::UnstartedJNIArrayCreateObjectArray(Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { int32_t length = static_cast<int32_t>(args[1]); if (length < 0) { ThrowNegativeArraySizeException(length); @@ -1998,8 +2076,11 @@ void UnstartedRuntime::UnstartedJNIArrayCreateObjectArray( } void UnstartedRuntime::UnstartedJNIThrowableNativeFillInStackTrace( - Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args ATTRIBUTE_UNUSED, JValue* result) { + Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { ScopedObjectAccessUnchecked soa(self); ScopedLocalRef<jobject> stack_trace(self->GetJniEnv(), self->CreateInternalStackTrace(soa)); result->SetL(soa.Decode<mirror::Object>(stack_trace.get())); @@ -2048,19 +2129,18 @@ void UnstartedRuntime::UnstartedJNIUnsafeGetArrayIndexScaleForComponentType( UnstartedJNIJdkUnsafeGetArrayIndexScaleForComponentType(self, method, receiver, args, result); } -void UnstartedRuntime::UnstartedJNIJdkUnsafeAddressSize( - Thread* self ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver 
ATTRIBUTE_UNUSED, - uint32_t* args ATTRIBUTE_UNUSED, - JValue* result) { +void UnstartedRuntime::UnstartedJNIJdkUnsafeAddressSize([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { result->SetI(sizeof(void*)); } void UnstartedRuntime::UnstartedJNIJdkUnsafeCompareAndSwapInt( Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, uint32_t* args, JValue* result) { ObjPtr<mirror::Object> obj = reinterpret_cast32<mirror::Object*>(args[0]); @@ -2101,11 +2181,12 @@ void UnstartedRuntime::UnstartedJNIJdkUnsafeCompareAndSetInt( UnstartedJNIJdkUnsafeCompareAndSwapInt(self, method, receiver, args, result); } -void UnstartedRuntime::UnstartedJNIJdkUnsafeGetIntVolatile(Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args, - JValue* result) { +void UnstartedRuntime::UnstartedJNIJdkUnsafeGetIntVolatile( + Thread* self, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + JValue* result) { ObjPtr<mirror::Object> obj = reinterpret_cast32<mirror::Object*>(args[0]); if (obj == nullptr) { AbortTransactionOrFail(self, "Unsafe.compareAndSwapIntVolatile with null object."); @@ -2117,10 +2198,10 @@ void UnstartedRuntime::UnstartedJNIJdkUnsafeGetIntVolatile(Thread* self, } void UnstartedRuntime::UnstartedJNIJdkUnsafePutObject(Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, - uint32_t* args, - JValue* result ATTRIBUTE_UNUSED) { + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, + uint32_t* args, + [[maybe_unused]] JValue* result) { ObjPtr<mirror::Object> obj = reinterpret_cast32<mirror::Object*>(args[0]); if (obj == nullptr) { AbortTransactionOrFail(self, "Unsafe.putObject with null object."); @@ -2141,8 +2222,8 @@ void UnstartedRuntime::UnstartedJNIJdkUnsafePutObject(Thread* self, void UnstartedRuntime::UnstartedJNIJdkUnsafeGetArrayBaseOffsetForComponentType( Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, uint32_t* args, JValue* result) { ObjPtr<mirror::Object> component = reinterpret_cast32<mirror::Object*>(args[0]); @@ -2156,8 +2237,8 @@ void UnstartedRuntime::UnstartedJNIJdkUnsafeGetArrayBaseOffsetForComponentType( void UnstartedRuntime::UnstartedJNIJdkUnsafeGetArrayIndexScaleForComponentType( Thread* self, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver ATTRIBUTE_UNUSED, + [[maybe_unused]] ArtMethod* method, + [[maybe_unused]] mirror::Object* receiver, uint32_t* args, JValue* result) { ObjPtr<mirror::Object> component = reinterpret_cast32<mirror::Object*>(args[0]); @@ -2169,23 +2250,21 @@ void UnstartedRuntime::UnstartedJNIJdkUnsafeGetArrayIndexScaleForComponentType( result->SetI(Primitive::ComponentSize(primitive_type)); } -void UnstartedRuntime::UnstartedJNIFieldGetArtField( - Thread* self ATTRIBUTE_UNUSED, - ArtMethod* method ATTRIBUTE_UNUSED, - mirror::Object* receiver, - uint32_t* args ATTRIBUTE_UNUSED, - JValue* result) { +void UnstartedRuntime::UnstartedJNIFieldGetArtField([[maybe_unused]] Thread* self, + [[maybe_unused]] ArtMethod* method, + mirror::Object* receiver, + [[maybe_unused]] uint32_t* args, + JValue* result) { 
   ObjPtr<mirror::Field> field = ObjPtr<mirror::Field>::DownCast(receiver);
   ArtField* art_field = field->GetArtField();
   result->SetJ(reinterpret_cast<int64_t>(art_field));
 }
 
-void UnstartedRuntime::UnstartedJNIFieldGetNameInternal(
-    Thread* self ATTRIBUTE_UNUSED,
-    ArtMethod* method ATTRIBUTE_UNUSED,
-    mirror::Object* receiver,
-    uint32_t* args ATTRIBUTE_UNUSED,
-    JValue* result) {
+void UnstartedRuntime::UnstartedJNIFieldGetNameInternal([[maybe_unused]] Thread* self,
+                                                        [[maybe_unused]] ArtMethod* method,
+                                                        mirror::Object* receiver,
+                                                        [[maybe_unused]] uint32_t* args,
+                                                        JValue* result) {
   ObjPtr<mirror::Field> field = ObjPtr<mirror::Field>::DownCast(receiver);
   ArtField* art_field = field->GetArtField();
   result->SetL(art_field->ResolveNameString());
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index b231cce0bc..ea475b503f 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -866,7 +866,7 @@ class JitDoneCompilingProfileTask final : public SelfDeletingTask {
   explicit JitDoneCompilingProfileTask(const std::vector<const DexFile*>& dex_files)
       : dex_files_(dex_files) {}
 
-  void Run(Thread* self ATTRIBUTE_UNUSED) override {
+  void Run([[maybe_unused]] Thread* self) override {
     // Madvise DONTNEED dex files now that we're done compiling methods.
     for (const DexFile* dex_file : dex_files_) {
       if (IsAddressKnownBackedByFileOrShared(dex_file->Begin())) {
@@ -890,7 +890,7 @@ class JitZygoteDoneCompilingTask final : public SelfDeletingTask {
  public:
   JitZygoteDoneCompilingTask() {}
 
-  void Run(Thread* self ATTRIBUTE_UNUSED) override {
+  void Run([[maybe_unused]] Thread* self) override {
     DCHECK(Runtime::Current()->IsZygote());
     Runtime::Current()->GetJit()->GetCodeCache()->GetZygoteMap()->SetCompilationState(
         ZygoteCompilationState::kDone);
diff --git a/runtime/jit/jit_memory_region.cc b/runtime/jit/jit_memory_region.cc
index 410bf7004a..cbfd39a24f 100644
--- a/runtime/jit/jit_memory_region.cc
+++ b/runtime/jit/jit_memory_region.cc
@@ -590,8 +590,8 @@ int JitMemoryRegion::CreateZygoteMemory(size_t capacity, std::string* error_msg)
   return fd;
 }
 
-bool JitMemoryRegion::ProtectZygoteMemory(int fd ATTRIBUTE_UNUSED,
-                                          std::string* error_msg ATTRIBUTE_UNUSED) {
+bool JitMemoryRegion::ProtectZygoteMemory([[maybe_unused]] int fd,
+                                          [[maybe_unused]] std::string* error_msg) {
   return true;
 }
 
diff --git a/runtime/jit/jit_memory_region_test.cc b/runtime/jit/jit_memory_region_test.cc
index 2a7977713c..a77ea8128e 100644
--- a/runtime/jit/jit_memory_region_test.cc
+++ b/runtime/jit/jit_memory_region_test.cc
@@ -39,8 +39,7 @@ static constexpr int kReturnFromFault = 42;
 // These globals are only set in child processes.
 void* gAddrToFaultOn = nullptr;
 
-[[noreturn]]
-void handler(int ATTRIBUTE_UNUSED, siginfo_t* info, void* ATTRIBUTE_UNUSED) {
+[[noreturn]] void handler([[maybe_unused]] int, siginfo_t* info, [[maybe_unused]] void*) {
   CHECK_EQ(info->si_addr, gAddrToFaultOn);
   exit(kReturnFromFault);
 }
diff --git a/runtime/jni/check_jni.cc b/runtime/jni/check_jni.cc
index eb54f9897f..3dc9b9f475 100644
--- a/runtime/jni/check_jni.cc
+++ b/runtime/jni/check_jni.cc
@@ -1617,8 +1617,10 @@ class GuardedCopy {
   /*
    * Perform the array "release" operation, which may or may not copy data
    * back into the managed heap, and may or may not release the underlying storage.
*/ - static void* ReleaseGuardedPACopy(const char* function_name, JNIEnv* env, - jarray java_array ATTRIBUTE_UNUSED, void* embedded_buf, + static void* ReleaseGuardedPACopy(const char* function_name, + JNIEnv* env, + [[maybe_unused]] jarray java_array, + void* embedded_buf, int mode) { ScopedObjectAccess soa(env); if (!GuardedCopy::Check(function_name, embedded_buf, true)) { @@ -1635,7 +1637,6 @@ class GuardedCopy { return original_ptr; } - /* * Free up the guard buffer, scrub it, and return the original pointer. */ diff --git a/runtime/jni/java_vm_ext_test.cc b/runtime/jni/java_vm_ext_test.cc index cae33b5f9d..e7295aaa62 100644 --- a/runtime/jni/java_vm_ext_test.cc +++ b/runtime/jni/java_vm_ext_test.cc @@ -62,7 +62,7 @@ TEST_F(JavaVmExtTest, JNI_GetCreatedJavaVMs) { static bool gSmallStack = false; static bool gAsDaemon = false; -static void* attach_current_thread_callback(void* arg ATTRIBUTE_UNUSED) { +static void* attach_current_thread_callback([[maybe_unused]] void* arg) { JavaVM* vms_buf[1]; jsize num_vms; JNIEnv* env; diff --git a/runtime/jni/jni_env_ext.cc b/runtime/jni/jni_env_ext.cc index bef0fd36cd..fcf38bafb0 100644 --- a/runtime/jni/jni_env_ext.cc +++ b/runtime/jni/jni_env_ext.cc @@ -289,7 +289,7 @@ void JNIEnvExt::CheckNoHeldMonitors() { } } -void ThreadResetFunctionTable(Thread* thread, void* arg ATTRIBUTE_UNUSED) +void ThreadResetFunctionTable(Thread* thread, [[maybe_unused]] void* arg) REQUIRES(Locks::jni_function_table_lock_) { JNIEnvExt* env = thread->GetJniEnv(); bool check_jni = env->IsCheckJniEnabled(); diff --git a/runtime/jni/jni_id_manager.cc b/runtime/jni/jni_id_manager.cc index e556f61a6b..5af1a78013 100644 --- a/runtime/jni/jni_id_manager.cc +++ b/runtime/jni/jni_id_manager.cc @@ -100,7 +100,7 @@ bool ShouldReturnPointer(ObjPtr<mirror::Class> klass, ArtType* t) REQUIRES_SHARED(Locks::mutator_lock_); template <> -bool ShouldReturnPointer(ObjPtr<mirror::Class> klass, ArtMethod* t ATTRIBUTE_UNUSED) { +bool ShouldReturnPointer(ObjPtr<mirror::Class> klass, [[maybe_unused]] ArtMethod* t) { ObjPtr<mirror::ClassExt> ext(klass->GetExtData()); if (ext.IsNull()) { return true; @@ -176,7 +176,7 @@ template <typename ArtType> size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtType* t, PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_); template <> -size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtField* f, PointerSize ptr_size ATTRIBUTE_UNUSED) { +size_t GetIdOffset(ObjPtr<mirror::Class> k, ArtField* f, [[maybe_unused]] PointerSize ptr_size) { return f->IsStatic() ? 
k->GetStaticFieldIdOffset(f) : k->GetInstanceFieldIdOffset(f); } template <> @@ -208,7 +208,7 @@ std::string PrettyGeneric(ReflectiveHandle<ArtField> f) { template <typename ArtType> bool CanUseIdArrays(ReflectiveHandle<ArtType> t) REQUIRES_SHARED(Locks::mutator_lock_); template <> -bool CanUseIdArrays(ReflectiveHandle<ArtField> t ATTRIBUTE_UNUSED) { +bool CanUseIdArrays([[maybe_unused]] ReflectiveHandle<ArtField> t) { return true; } template <> @@ -264,7 +264,7 @@ std::vector<ArtMethod*>& JniIdManager::GetGenericMap<ArtMethod>() { } template <> size_t JniIdManager::GetLinearSearchStartId<ArtField>( - ReflectiveHandle<ArtField> t ATTRIBUTE_UNUSED) { + [[maybe_unused]] ReflectiveHandle<ArtField> t) { return deferred_allocation_field_id_start_; } diff --git a/runtime/jni/jni_internal.cc b/runtime/jni/jni_internal.cc index ad2efc5944..71bed2e0be 100644 --- a/runtime/jni/jni_internal.cc +++ b/runtime/jni/jni_internal.cc @@ -162,7 +162,7 @@ class NewStringUTFVisitor { NewStringUTFVisitor(const char* utf, size_t utf8_length, int32_t count, bool has_bad_char) : utf_(utf), utf8_length_(utf8_length), count_(count), has_bad_char_(has_bad_char) {} - void operator()(ObjPtr<mirror::Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const + void operator()(ObjPtr<mirror::Object> obj, [[maybe_unused]] size_t usable_size) const REQUIRES_SHARED(Locks::mutator_lock_) { // Avoid AsString as object is not yet in live bitmap or allocation stack. ObjPtr<mirror::String> string = ObjPtr<mirror::String>::DownCast(obj); @@ -226,7 +226,7 @@ constexpr bool kUtfReplaceBadSurrogates = false; jsize GetUncompressedStringUTFLength(const uint16_t* chars, size_t length) { jsize byte_count = 0; ConvertUtf16ToUtf8<kUtfUseShortZero, kUtfUse4ByteSequence, kUtfReplaceBadSurrogates>( - chars, length, [&](char c ATTRIBUTE_UNUSED) { ++byte_count; }); + chars, length, [&]([[maybe_unused]] char c) { ++byte_count; }); return byte_count; } @@ -2830,7 +2830,7 @@ class JNI { return static_cast<jlong>(WellKnownClasses::java_nio_Buffer_capacity->GetInt(buffer.Get())); } - static jobjectRefType GetObjectRefType(JNIEnv* env ATTRIBUTE_UNUSED, jobject java_object) { + static jobjectRefType GetObjectRefType([[maybe_unused]] JNIEnv* env, jobject java_object) { if (java_object == nullptr) { return JNIInvalidRefType; } diff --git a/runtime/jni/local_reference_table.h b/runtime/jni/local_reference_table.h index 900e4c3e12..22fa4a91df 100644 --- a/runtime/jni/local_reference_table.h +++ b/runtime/jni/local_reference_table.h @@ -333,7 +333,7 @@ class LocalReferenceTable { void SetSegmentState(LRTSegmentState new_state); - static Offset SegmentStateOffset(size_t pointer_size ATTRIBUTE_UNUSED) { + static Offset SegmentStateOffset([[maybe_unused]] size_t pointer_size) { // Note: Currently segment_state_ is at offset 0. We're testing the expected value in // jni_internal_test to make sure it stays correct. It is not OFFSETOF_MEMBER, as that // is not pointer-size-safe. 
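
The hunks in this change all follow one mechanical pattern: the trailing ATTRIBUTE_UNUSED macro is dropped and the standard C++17 [[maybe_unused]] attribute is placed in front of the declaration it applies to, whether that is a named parameter, an unnamed parameter, a lambda parameter, or a padding field, removing the need for a compiler-specific macro. The stand-alone sketch below (not taken from the patch; the names are made up) illustrates those placements and only assumes a C++17 compiler, e.g. g++ -std=c++17 -Wall -Wextra.

#include <cstdint>

class Example {
 public:
  // Unused named parameter: the attribute precedes the declaration instead of
  // a macro trailing it.
  std::int32_t Get([[maybe_unused]] bool verify) const { return length_; }

  // Unnamed parameters can carry the attribute as well.
  static void Callback([[maybe_unused]] void*) {}

 private:
  // Field kept only for layout purposes and never read directly.
  [[maybe_unused]] std::uint8_t padding_[2];
  std::int32_t length_ = 0;
};

int main() {
  // Lambda parameters accept the attribute in the same position.
  auto ignore_char = []([[maybe_unused]] char c) { /* value intentionally unused */ };
  ignore_char('a');
  return Example().Get(/*verify=*/true);
}
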
diff --git a/runtime/metrics/reporter_test.cc b/runtime/metrics/reporter_test.cc
index 848a74ee61..a61f8be75b 100644
--- a/runtime/metrics/reporter_test.cc
+++ b/runtime/metrics/reporter_test.cc
@@ -65,10 +65,10 @@ class TestBackend : public MetricsBackend {
     current_report_->data.Put(counter_type, value);
   }
 
-  void ReportHistogram(DatumId histogram_type ATTRIBUTE_UNUSED,
-                       int64_t low_value ATTRIBUTE_UNUSED,
-                       int64_t high_value ATTRIBUTE_UNUSED,
-                       const std::vector<uint32_t>& buckets ATTRIBUTE_UNUSED) override {
+  void ReportHistogram([[maybe_unused]] DatumId histogram_type,
+                       [[maybe_unused]] int64_t low_value,
+                       [[maybe_unused]] int64_t high_value,
+                       [[maybe_unused]] const std::vector<uint32_t>& buckets) override {
     // TODO: nothing yet. We should implement and test histograms as well.
   }
 
diff --git a/runtime/mirror/accessible_object.h b/runtime/mirror/accessible_object.h
index 7c0d91ab4f..1e434a124b 100644
--- a/runtime/mirror/accessible_object.h
+++ b/runtime/mirror/accessible_object.h
@@ -39,7 +39,7 @@ class MANAGED AccessibleObject : public Object {
 
  private:
  // We only use the field indirectly using the FlagOffset() method.
-  uint8_t flag_ ATTRIBUTE_UNUSED;
+  [[maybe_unused]] uint8_t flag_;
   DISALLOW_IMPLICIT_CONSTRUCTORS(AccessibleObject);
 };
 
diff --git a/runtime/mirror/array-alloc-inl.h b/runtime/mirror/array-alloc-inl.h
index c1e0175c8c..32840d4b78 100644
--- a/runtime/mirror/array-alloc-inl.h
+++ b/runtime/mirror/array-alloc-inl.h
@@ -67,7 +67,7 @@ class SetLengthVisitor {
   explicit SetLengthVisitor(int32_t length) : length_(length) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsArray as object is not yet in live bitmap or allocation stack.
     ObjPtr<Array> array = ObjPtr<Array>::DownCast(obj);
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 5d64167566..e7cfb92bdc 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -140,10 +140,10 @@ class MANAGED Array : public Object {
 
   // The number of array elements.
   // We only use the field indirectly using the LengthOffset() method.
-  int32_t length_ ATTRIBUTE_UNUSED;
+  [[maybe_unused]] int32_t length_;
   // Marker for the data (used by generated code)
   // We only use the field indirectly using the DataOffset() method.
-  uint32_t first_element_[0] ATTRIBUTE_UNUSED;
+  [[maybe_unused]] uint32_t first_element_[0];
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(Array);
 };
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 6458613c37..296eeed58a 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -1605,9 +1605,9 @@ void Class::PopulateEmbeddedVTable(PointerSize pointer_size) {
 
 class ReadBarrierOnNativeRootsVisitor {
  public:
-  void operator()(ObjPtr<Object> obj ATTRIBUTE_UNUSED,
-                  MemberOffset offset ATTRIBUTE_UNUSED,
-                  bool is_static ATTRIBUTE_UNUSED) const {}
+  void operator()([[maybe_unused]] ObjPtr<Object> obj,
+                  [[maybe_unused]] MemberOffset offset,
+                  [[maybe_unused]] bool is_static) const {}
 
   void VisitRootIfNonNull(CompressedReference<Object>* root) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1644,7 +1644,7 @@ class CopyClassVisitor {
         copy_bytes_(copy_bytes), imt_(imt), pointer_size_(pointer_size) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     StackHandleScope<1> hs(self_);
     Handle<mirror::Class> h_new_class_obj(hs.NewHandle(obj->AsClass()));
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index 197172c0b4..8a7ab88d54 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -81,7 +81,7 @@ class MANAGED ClassLoader : public Object {
   HeapReference<ClassLoader> parent_;
   HeapReference<Object> proxyCache_;
   // Native pointer to class table, need to zero this out when image writing.
-  uint32_t padding_ ATTRIBUTE_UNUSED;
+  [[maybe_unused]] uint32_t padding_;
   uint64_t allocator_;
   uint64_t class_table_;
 
diff --git a/runtime/mirror/executable.h b/runtime/mirror/executable.h
index dc4ec95243..079efc3320 100644
--- a/runtime/mirror/executable.h
+++ b/runtime/mirror/executable.h
@@ -64,7 +64,7 @@ class MANAGED Executable : public AccessibleObject {
 
   uint8_t has_real_parameter_data_;
   // Padding required for matching alignment with the Java peer.
-  uint8_t padding_[2] ATTRIBUTE_UNUSED;
+  [[maybe_unused]] uint8_t padding_[2];
 
   HeapReference<mirror::Class> declaring_class_;
   HeapReference<mirror::Class> declaring_class_of_overridden_method_;
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 5016c2003a..940b82dc0a 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -66,9 +66,9 @@ class CopyReferenceFieldsWithReadBarrierVisitor {
   }
 
   // Unused since we don't copy class native roots.
-  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
-      const {}
-  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+  void VisitRootIfNonNull(
+      [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
+  void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const {}
 
  private:
   const ObjPtr<Object> dest_obj_;
@@ -144,7 +144,7 @@ class CopyObjectVisitor {
   CopyObjectVisitor(Handle<Object>* orig, size_t num_bytes)
       : orig_(orig), num_bytes_(num_bytes) {}
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
     Object::CopyObject(obj, orig_->Get(), num_bytes_);
   }
diff --git a/runtime/mirror/string-alloc-inl.h b/runtime/mirror/string-alloc-inl.h
index cb2dcb2c85..9c2529cef4 100644
--- a/runtime/mirror/string-alloc-inl.h
+++ b/runtime/mirror/string-alloc-inl.h
@@ -41,7 +41,7 @@ class SetStringCountVisitor {
   explicit SetStringCountVisitor(int32_t count) : count_(count) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -61,7 +61,7 @@ class SetStringCountAndBytesVisitor {
       : count_(count), src_array_(src_array), offset_(offset), high_byte_(high_byte) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -96,7 +96,7 @@ class SetStringCountAndUtf16BytesVisitor {
      : count_(count), src_array_(src_array), offset_(offset) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -132,7 +132,7 @@ class SetStringCountAndValueVisitorFromCharArray {
         count_(count), src_array_(src_array), offset_(offset) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
@@ -163,7 +163,7 @@ class SetStringCountAndValueVisitorFromString {
         count_(count), src_string_(src_string), offset_(offset) {
   }
 
-  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, [[maybe_unused]] size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
ObjPtr<String> string = ObjPtr<String>::DownCast(obj); diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc index 9f0c2161f4..f602f73630 100644 --- a/runtime/native/dalvik_system_DexFile.cc +++ b/runtime/native/dalvik_system_DexFile.cc @@ -368,8 +368,8 @@ static bool isReadOnlyJavaDclChecked() { static jobject DexFile_openDexFileNative(JNIEnv* env, jclass, jstring javaSourceName, - jstring javaOutputName ATTRIBUTE_UNUSED, - jint flags ATTRIBUTE_UNUSED, + [[maybe_unused]] jstring javaOutputName, + [[maybe_unused]] jint flags, jobject class_loader, jobjectArray dex_elements) { ScopedUtfChars sourceName(env, javaSourceName); @@ -758,8 +758,8 @@ static jboolean DexFile_isDexOptNeeded(JNIEnv* env, jclass, jstring javaFilename } static jboolean DexFile_isValidCompilerFilter(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED, - jstring javaCompilerFilter) { + [[maybe_unused]] jclass javaDexFileClass, + jstring javaCompilerFilter) { ScopedUtfChars compiler_filter(env, javaCompilerFilter); if (env->ExceptionCheck()) { return -1; @@ -771,7 +771,7 @@ static jboolean DexFile_isValidCompilerFilter(JNIEnv* env, } static jboolean DexFile_isProfileGuidedCompilerFilter(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass javaDexFileClass, jstring javaCompilerFilter) { ScopedUtfChars compiler_filter(env, javaCompilerFilter); if (env->ExceptionCheck()) { @@ -786,7 +786,7 @@ static jboolean DexFile_isProfileGuidedCompilerFilter(JNIEnv* env, } static jboolean DexFile_isVerifiedCompilerFilter(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass javaDexFileClass, jstring javaCompilerFilter) { ScopedUtfChars compiler_filter(env, javaCompilerFilter); if (env->ExceptionCheck()) { @@ -801,7 +801,7 @@ static jboolean DexFile_isVerifiedCompilerFilter(JNIEnv* env, } static jboolean DexFile_isOptimizedCompilerFilter(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass javaDexFileClass, jstring javaCompilerFilter) { ScopedUtfChars compiler_filter(env, javaCompilerFilter); if (env->ExceptionCheck()) { @@ -816,12 +816,12 @@ static jboolean DexFile_isOptimizedCompilerFilter(JNIEnv* env, } static jboolean DexFile_isReadOnlyJavaDclEnforced(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass javaDexFileClass) { return (isReadOnlyJavaDclChecked() && isReadOnlyJavaDclEnforced(env)) ? 
JNI_TRUE : JNI_FALSE; } static jstring DexFile_getNonProfileGuidedCompilerFilter(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass javaDexFileClass, jstring javaCompilerFilter) { ScopedUtfChars compiler_filter(env, javaCompilerFilter); if (env->ExceptionCheck()) { @@ -846,7 +846,7 @@ static jstring DexFile_getNonProfileGuidedCompilerFilter(JNIEnv* env, } static jstring DexFile_getSafeModeCompilerFilter(JNIEnv* env, - jclass javeDexFileClass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass javaDexFileClass, jstring javaCompilerFilter) { ScopedUtfChars compiler_filter(env, javaCompilerFilter); if (env->ExceptionCheck()) { diff --git a/runtime/native/dalvik_system_VMDebug.cc b/runtime/native/dalvik_system_VMDebug.cc index 3653a83915..65d131ab48 100644 --- a/runtime/native/dalvik_system_VMDebug.cc +++ b/runtime/native/dalvik_system_VMDebug.cc @@ -90,7 +90,7 @@ static void VMDebug_startMethodTracingDdmsImpl(JNIEnv*, jclass, jint bufferSize, static void VMDebug_startMethodTracingFd(JNIEnv* env, jclass, - jstring javaTraceFilename ATTRIBUTE_UNUSED, + [[maybe_unused]] jstring javaTraceFilename, jint javaFd, jint bufferSize, jint flags, diff --git a/runtime/native/dalvik_system_VMRuntime.cc b/runtime/native/dalvik_system_VMRuntime.cc index 9e2e8b9f80..1ffb7cea38 100644 --- a/runtime/native/dalvik_system_VMRuntime.cc +++ b/runtime/native/dalvik_system_VMRuntime.cc @@ -237,8 +237,8 @@ static jboolean VMRuntime_isCheckJniEnabled(JNIEnv* env, jobject) { return down_cast<JNIEnvExt*>(env)->GetVm()->IsCheckJniEnabled() ? JNI_TRUE : JNI_FALSE; } -static jint VMRuntime_getSdkVersionNative(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED, +static jint VMRuntime_getSdkVersionNative([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass, jint default_sdk_version) { return android::base::GetIntProperty("ro.build.version.sdk", default_sdk_version); @@ -355,8 +355,7 @@ static void VMRuntime_runHeapTasks(JNIEnv* env, jobject) { Runtime::Current()->GetHeap()->GetTaskProcessor()->RunAllTasks(Thread::ForEnv(env)); } -static void VMRuntime_preloadDexCaches(JNIEnv* env ATTRIBUTE_UNUSED, jobject) { -} +static void VMRuntime_preloadDexCaches([[maybe_unused]] JNIEnv* env, jobject) {} /* * This is called by the framework after it loads a code path on behalf of the app. @@ -364,7 +363,7 @@ static void VMRuntime_preloadDexCaches(JNIEnv* env ATTRIBUTE_UNUSED, jobject) { * for more precise telemetry (e.g. is the split apk odex up to date?) and debugging. 
*/ static void VMRuntime_registerAppInfo(JNIEnv* env, - jclass clazz ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass clazz, jstring package_name, jstring cur_profile_file, jstring ref_profile_file, @@ -418,8 +417,8 @@ static jstring VMRuntime_getCurrentInstructionSet(JNIEnv* env, jclass) { return env->NewStringUTF(GetInstructionSetString(kRuntimeISA)); } -static void VMRuntime_setSystemDaemonThreadPriority(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +static void VMRuntime_setSystemDaemonThreadPriority([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { #ifdef ART_TARGET_ANDROID Thread* self = Thread::Current(); DCHECK(self != nullptr); @@ -435,14 +434,14 @@ static void VMRuntime_setSystemDaemonThreadPriority(JNIEnv* env ATTRIBUTE_UNUSED #endif } -static void VMRuntime_setDedupeHiddenApiWarnings(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED, +static void VMRuntime_setDedupeHiddenApiWarnings([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass, jboolean dedupe) { Runtime::Current()->SetDedupeHiddenApiWarnings(dedupe); } static void VMRuntime_setProcessPackageName(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jstring java_package_name) { ScopedUtfChars package_name(env, java_package_name); Runtime::Current()->SetProcessPackageName(package_name.c_str()); @@ -453,8 +452,7 @@ static void VMRuntime_setProcessDataDirectory(JNIEnv* env, jclass, jstring java_ Runtime::Current()->SetProcessDataDirectory(data_dir.c_str()); } -static void VMRuntime_bootCompleted(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +static void VMRuntime_bootCompleted([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) { jit::Jit* jit = Runtime::Current()->GetJit(); if (jit != nullptr) { jit->BootCompleted(); @@ -482,14 +480,14 @@ class ClearJitCountersVisitor : public ClassVisitor { } }; -static void VMRuntime_resetJitCounters(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { +static void VMRuntime_resetJitCounters(JNIEnv* env, [[maybe_unused]] jclass klass) { ScopedObjectAccess soa(env); ClearJitCountersVisitor visitor; Runtime::Current()->GetClassLinker()->VisitClasses(&visitor); } static jboolean VMRuntime_isValidClassLoaderContext(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jstring jencoded_class_loader_context) { if (UNLIKELY(jencoded_class_loader_context == nullptr)) { ScopedFastNativeObjectAccess soa(env); @@ -500,7 +498,7 @@ static jboolean VMRuntime_isValidClassLoaderContext(JNIEnv* env, return ClassLoaderContext::IsValidEncoding(encoded_class_loader_context.c_str()); } -static jobject VMRuntime_getBaseApkOptimizationInfo(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { +static jobject VMRuntime_getBaseApkOptimizationInfo(JNIEnv* env, [[maybe_unused]] jclass klass) { AppInfo* app_info = Runtime::Current()->GetAppInfo(); DCHECK(app_info != nullptr); diff --git a/runtime/native/dalvik_system_ZygoteHooks.cc b/runtime/native/dalvik_system_ZygoteHooks.cc index 3c73cc569e..5ea6d3fc3e 100644 --- a/runtime/native/dalvik_system_ZygoteHooks.cc +++ b/runtime/native/dalvik_system_ZygoteHooks.cc @@ -266,8 +266,8 @@ static void ZygoteHooks_nativePostZygoteFork(JNIEnv*, jclass) { Runtime::Current()->PostZygoteFork(); } -static void ZygoteHooks_nativePostForkSystemServer(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED, +static void ZygoteHooks_nativePostForkSystemServer([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass, jint runtime_flags) { // Reload the current 
flags first. In case we need to take actions based on them. Runtime::Current()->ReloadAllFlags(__FUNCTION__); @@ -441,18 +441,18 @@ static void ZygoteHooks_nativePostForkChild(JNIEnv* env, } } -static void ZygoteHooks_startZygoteNoThreadCreation(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +static void ZygoteHooks_startZygoteNoThreadCreation([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { Runtime::Current()->SetZygoteNoThreadSection(true); } -static void ZygoteHooks_stopZygoteNoThreadCreation(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +static void ZygoteHooks_stopZygoteNoThreadCreation([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { Runtime::Current()->SetZygoteNoThreadSection(false); } -static jboolean ZygoteHooks_nativeZygoteLongSuspendOk(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +static jboolean ZygoteHooks_nativeZygoteLongSuspendOk([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { // Indefinite thread suspensions are not OK if we're supposed to be JIT-compiling for other // processes. We only care about JIT compilation that affects other processes. The zygote // itself doesn't run appreciable amounts of Java code when running single-threaded, so @@ -464,7 +464,6 @@ static jboolean ZygoteHooks_nativeZygoteLongSuspendOk(JNIEnv* env ATTRIBUTE_UNUS return (isJitZygote || explicitlyDisabled) ? JNI_FALSE : JNI_TRUE; } - static JNINativeMethod gMethods[] = { NATIVE_METHOD(ZygoteHooks, nativePreFork, "()J"), NATIVE_METHOD(ZygoteHooks, nativePostZygoteFork, "()V"), diff --git a/runtime/native/java_lang_reflect_Constructor.cc b/runtime/native/java_lang_reflect_Constructor.cc index 4b2cc43ed3..98afddc260 100644 --- a/runtime/native/java_lang_reflect_Constructor.cc +++ b/runtime/native/java_lang_reflect_Constructor.cc @@ -120,11 +120,13 @@ static jobject Constructor_newInstance0(JNIEnv* env, jobject javaMethod, jobject return javaReceiver; } -static jobject Constructor_newInstanceFromSerialization(JNIEnv* env, jclass unused ATTRIBUTE_UNUSED, - jclass ctorClass, jclass allocClass) { - jmethodID ctor = env->GetMethodID(ctorClass, "<init>", "()V"); - DCHECK(ctor != nullptr); - return env->NewObject(allocClass, ctor); +static jobject Constructor_newInstanceFromSerialization(JNIEnv* env, + [[maybe_unused]] jclass unused, + jclass ctorClass, + jclass allocClass) { + jmethodID ctor = env->GetMethodID(ctorClass, "<init>", "()V"); + DCHECK(ctor != nullptr); + return env->NewObject(allocClass, ctor); } static JNINativeMethod gMethods[] = { diff --git a/runtime/native/jdk_internal_misc_Unsafe.cc b/runtime/native/jdk_internal_misc_Unsafe.cc index 6e2f558dce..9b2021d176 100644 --- a/runtime/native/jdk_internal_misc_Unsafe.cc +++ b/runtime/native/jdk_internal_misc_Unsafe.cc @@ -261,11 +261,11 @@ static jint Unsafe_getArrayIndexScaleForComponentType(JNIEnv* env, jclass, jclas return Primitive::ComponentSize(primitive_type); } -static jint Unsafe_addressSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) { +static jint Unsafe_addressSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) { return sizeof(void*); } -static jint Unsafe_pageSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) { +static jint Unsafe_pageSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) { return sysconf(_SC_PAGESIZE); } @@ -288,73 +288,80 @@ static jlong Unsafe_allocateMemory(JNIEnv* env, jobject, jlong bytes) { return reinterpret_cast<uintptr_t>(mem); } -static void 
Unsafe_freeMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static void Unsafe_freeMemory([[maybe_unused]] JNIEnv* env, jobject, jlong address) { free(reinterpret_cast<void*>(static_cast<uintptr_t>(address))); } -static void Unsafe_setMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong bytes, jbyte value) { +static void Unsafe_setMemory( + [[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong bytes, jbyte value) { memset(reinterpret_cast<void*>(static_cast<uintptr_t>(address)), value, bytes); } -static jbyte Unsafe_getByteJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jbyte Unsafe_getByteJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jbyte*>(address); } -static void Unsafe_putByteJB(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jbyte value) { +static void Unsafe_putByteJB([[maybe_unused]] JNIEnv* env, jobject, jlong address, jbyte value) { *reinterpret_cast<jbyte*>(address) = value; } -static jshort Unsafe_getShortJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jshort Unsafe_getShortJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jshort*>(address); } -static void Unsafe_putShortJS(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jshort value) { +static void Unsafe_putShortJS([[maybe_unused]] JNIEnv* env, jobject, jlong address, jshort value) { *reinterpret_cast<jshort*>(address) = value; } -static jchar Unsafe_getCharJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jchar Unsafe_getCharJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jchar*>(address); } -static void Unsafe_putCharJC(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jchar value) { +static void Unsafe_putCharJC([[maybe_unused]] JNIEnv* env, jobject, jlong address, jchar value) { *reinterpret_cast<jchar*>(address) = value; } -static jint Unsafe_getIntJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jint Unsafe_getIntJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jint*>(address); } -static void Unsafe_putIntJI(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jint value) { +static void Unsafe_putIntJI([[maybe_unused]] JNIEnv* env, jobject, jlong address, jint value) { *reinterpret_cast<jint*>(address) = value; } -static jlong Unsafe_getLongJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jlong Unsafe_getLongJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jlong*>(address); } -static void Unsafe_putLongJJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong value) { +static void Unsafe_putLongJJ([[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong value) { *reinterpret_cast<jlong*>(address) = value; } -static jfloat Unsafe_getFloatJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jfloat Unsafe_getFloatJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jfloat*>(address); } -static void Unsafe_putFloatJF(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jfloat value) { +static void Unsafe_putFloatJF([[maybe_unused]] JNIEnv* env, jobject, jlong address, jfloat value) { *reinterpret_cast<jfloat*>(address) = value; } -static jdouble Unsafe_getDoubleJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jdouble Unsafe_getDoubleJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jdouble*>(address); } -static void 
Unsafe_putDoubleJD(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jdouble value) { +static void Unsafe_putDoubleJD([[maybe_unused]] JNIEnv* env, + jobject, + jlong address, + jdouble value) { *reinterpret_cast<jdouble*>(address) = value; } -static void Unsafe_copyMemory0(JNIEnv *env, jobject unsafe ATTRIBUTE_UNUSED, - jobject srcObj, jlong srcOffset, - jobject dstObj, jlong dstOffset, - jlong size) { +static void Unsafe_copyMemory0(JNIEnv* env, + [[maybe_unused]] jobject unsafe, + jobject srcObj, + jlong srcOffset, + jobject dstObj, + jlong dstOffset, + jlong size) { ScopedFastNativeObjectAccess soa(env); if (size == 0) { return; diff --git a/runtime/native/libcore_util_CharsetUtils.cc b/runtime/native/libcore_util_CharsetUtils.cc index c53fd6e6e1..46f8993a10 100644 --- a/runtime/native/libcore_util_CharsetUtils.cc +++ b/runtime/native/libcore_util_CharsetUtils.cc @@ -113,7 +113,7 @@ static jbyteArray CharsetUtils_toUtf8Bytes(JNIEnv* env, jclass, jstring java_str utf8_length = length; } else { const uint16_t* utf16 = string->GetValue() + offset; - auto count_length = [&utf8_length](jbyte c ATTRIBUTE_UNUSED) ALWAYS_INLINE { ++utf8_length; }; + auto count_length = [&utf8_length]([[maybe_unused]] jbyte c) ALWAYS_INLINE { ++utf8_length; }; ConvertUtf16ToUtf8</*kUseShortZero=*/ true, /*kUse4ByteSequence=*/ true, /*kReplaceBadSurrogates=*/ true>(utf16, length, count_length); diff --git a/runtime/native/sun_misc_Unsafe.cc b/runtime/native/sun_misc_Unsafe.cc index 8a203cee1a..f1e47ee100 100644 --- a/runtime/native/sun_misc_Unsafe.cc +++ b/runtime/native/sun_misc_Unsafe.cc @@ -219,11 +219,11 @@ static jint Unsafe_getArrayIndexScaleForComponentType(JNIEnv* env, jclass, jclas return Primitive::ComponentSize(primitive_type); } -static jint Unsafe_addressSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) { +static jint Unsafe_addressSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) { return sizeof(void*); } -static jint Unsafe_pageSize(JNIEnv* env ATTRIBUTE_UNUSED, jobject ob ATTRIBUTE_UNUSED) { +static jint Unsafe_pageSize([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jobject ob) { return sysconf(_SC_PAGESIZE); } @@ -242,71 +242,75 @@ static jlong Unsafe_allocateMemory(JNIEnv* env, jobject, jlong bytes) { return (uintptr_t) mem; } -static void Unsafe_freeMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static void Unsafe_freeMemory([[maybe_unused]] JNIEnv* env, jobject, jlong address) { free(reinterpret_cast<void*>(static_cast<uintptr_t>(address))); } -static void Unsafe_setMemory(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong bytes, jbyte value) { +static void Unsafe_setMemory( + [[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong bytes, jbyte value) { memset(reinterpret_cast<void*>(static_cast<uintptr_t>(address)), value, bytes); } -static jbyte Unsafe_getByteJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jbyte Unsafe_getByteJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jbyte*>(address); } -static void Unsafe_putByteJB(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jbyte value) { +static void Unsafe_putByteJB([[maybe_unused]] JNIEnv* env, jobject, jlong address, jbyte value) { *reinterpret_cast<jbyte*>(address) = value; } -static jshort Unsafe_getShortJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jshort Unsafe_getShortJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jshort*>(address); } -static void 
Unsafe_putShortJS(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jshort value) { +static void Unsafe_putShortJS([[maybe_unused]] JNIEnv* env, jobject, jlong address, jshort value) { *reinterpret_cast<jshort*>(address) = value; } -static jchar Unsafe_getCharJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jchar Unsafe_getCharJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jchar*>(address); } -static void Unsafe_putCharJC(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jchar value) { +static void Unsafe_putCharJC([[maybe_unused]] JNIEnv* env, jobject, jlong address, jchar value) { *reinterpret_cast<jchar*>(address) = value; } -static jint Unsafe_getIntJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jint Unsafe_getIntJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jint*>(address); } -static void Unsafe_putIntJI(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jint value) { +static void Unsafe_putIntJI([[maybe_unused]] JNIEnv* env, jobject, jlong address, jint value) { *reinterpret_cast<jint*>(address) = value; } -static jlong Unsafe_getLongJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jlong Unsafe_getLongJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jlong*>(address); } -static void Unsafe_putLongJJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jlong value) { +static void Unsafe_putLongJJ([[maybe_unused]] JNIEnv* env, jobject, jlong address, jlong value) { *reinterpret_cast<jlong*>(address) = value; } -static jfloat Unsafe_getFloatJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jfloat Unsafe_getFloatJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jfloat*>(address); } -static void Unsafe_putFloatJF(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jfloat value) { +static void Unsafe_putFloatJF([[maybe_unused]] JNIEnv* env, jobject, jlong address, jfloat value) { *reinterpret_cast<jfloat*>(address) = value; } -static jdouble Unsafe_getDoubleJ(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address) { +static jdouble Unsafe_getDoubleJ([[maybe_unused]] JNIEnv* env, jobject, jlong address) { return *reinterpret_cast<jdouble*>(address); } -static void Unsafe_putDoubleJD(JNIEnv* env ATTRIBUTE_UNUSED, jobject, jlong address, jdouble value) { +static void Unsafe_putDoubleJD([[maybe_unused]] JNIEnv* env, + jobject, + jlong address, + jdouble value) { *reinterpret_cast<jdouble*>(address) = value; } -static void Unsafe_copyMemory(JNIEnv *env, jobject unsafe ATTRIBUTE_UNUSED, jlong src, - jlong dst, jlong size) { +static void Unsafe_copyMemory( + JNIEnv* env, [[maybe_unused]] jobject unsafe, jlong src, jlong dst, jlong size) { if (size == 0) { return; } @@ -347,8 +351,8 @@ static void copyFromArray(jlong dstAddr, } } -static void Unsafe_copyMemoryToPrimitiveArray(JNIEnv *env, - jobject unsafe ATTRIBUTE_UNUSED, +static void Unsafe_copyMemoryToPrimitiveArray(JNIEnv* env, + [[maybe_unused]] jobject unsafe, jlong srcAddr, jobject dstObj, jlong dstOffset, @@ -382,8 +386,8 @@ static void Unsafe_copyMemoryToPrimitiveArray(JNIEnv *env, } } -static void Unsafe_copyMemoryFromPrimitiveArray(JNIEnv *env, - jobject unsafe ATTRIBUTE_UNUSED, +static void Unsafe_copyMemoryFromPrimitiveArray(JNIEnv* env, + [[maybe_unused]] jobject unsafe, jobject srcObj, jlong srcOffset, jlong dstAddr, diff --git a/runtime/native_stack_dump.cc b/runtime/native_stack_dump.cc index d6a0fae02f..bda912eceb 
100644 --- a/runtime/native_stack_dump.cc +++ b/runtime/native_stack_dump.cc @@ -431,22 +431,20 @@ void DumpNativeStack(std::ostream& os, #elif defined(__APPLE__) -void DumpNativeStack(std::ostream& os ATTRIBUTE_UNUSED, - pid_t tid ATTRIBUTE_UNUSED, - const char* prefix ATTRIBUTE_UNUSED, - ArtMethod* current_method ATTRIBUTE_UNUSED, - void* ucontext_ptr ATTRIBUTE_UNUSED, - bool skip_frames ATTRIBUTE_UNUSED) { -} - -void DumpNativeStack(std::ostream& os ATTRIBUTE_UNUSED, - unwindstack::AndroidLocalUnwinder& existing_map ATTRIBUTE_UNUSED, - pid_t tid ATTRIBUTE_UNUSED, - const char* prefix ATTRIBUTE_UNUSED, - ArtMethod* current_method ATTRIBUTE_UNUSED, - void* ucontext_ptr ATTRIBUTE_UNUSED, - bool skip_frames ATTRIBUTE_UNUSED) { -} +void DumpNativeStack([[maybe_unused]] std::ostream& os, + [[maybe_unused]] pid_t tid, + [[maybe_unused]] const char* prefix, + [[maybe_unused]] ArtMethod* current_method, + [[maybe_unused]] void* ucontext_ptr, + [[maybe_unused]] bool skip_frames) {} + +void DumpNativeStack([[maybe_unused]] std::ostream& os, + [[maybe_unused]] unwindstack::AndroidLocalUnwinder& existing_map, + [[maybe_unused]] pid_t tid, + [[maybe_unused]] const char* prefix, + [[maybe_unused]] ArtMethod* current_method, + [[maybe_unused]] void* ucontext_ptr, + [[maybe_unused]] bool skip_frames) {} #else #error "Unsupported architecture for native stack dumps." diff --git a/runtime/noop_compiler_callbacks.h b/runtime/noop_compiler_callbacks.h index aed00149a6..1e4e701d97 100644 --- a/runtime/noop_compiler_callbacks.h +++ b/runtime/noop_compiler_callbacks.h @@ -26,9 +26,9 @@ class NoopCompilerCallbacks final : public CompilerCallbacks { NoopCompilerCallbacks() : CompilerCallbacks(CompilerCallbacks::CallbackMode::kCompileApp) {} ~NoopCompilerCallbacks() {} - void AddUncompilableMethod(MethodReference ref ATTRIBUTE_UNUSED) override {} - void AddUncompilableClass(ClassReference ref ATTRIBUTE_UNUSED) override {} - void ClassRejected(ClassReference ref ATTRIBUTE_UNUSED) override {} + void AddUncompilableMethod([[maybe_unused]] MethodReference ref) override {} + void AddUncompilableClass([[maybe_unused]] ClassReference ref) override {} + void ClassRejected([[maybe_unused]] ClassReference ref) override {} verifier::VerifierDeps* GetVerifierDeps() const override { return nullptr; } diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc index 5f74584a74..c75a9ec19d 100644 --- a/runtime/oat_file.cc +++ b/runtime/oat_file.cc @@ -1159,12 +1159,12 @@ class DlOpenOatFile final : public OatFileBase { /*inout*/MemMap* reservation, // Where to load if not null. /*out*/std::string* error_msg) override; - bool Load(int oat_fd ATTRIBUTE_UNUSED, - bool writable ATTRIBUTE_UNUSED, - bool executable ATTRIBUTE_UNUSED, - bool low_4gb ATTRIBUTE_UNUSED, - /*inout*/MemMap* reservation ATTRIBUTE_UNUSED, - /*out*/std::string* error_msg ATTRIBUTE_UNUSED) override { + bool Load([[maybe_unused]] int oat_fd, + [[maybe_unused]] bool writable, + [[maybe_unused]] bool executable, + [[maybe_unused]] bool low_4gb, + [[maybe_unused]] /*inout*/ MemMap* reservation, + [[maybe_unused]] /*out*/ std::string* error_msg) override { return false; } @@ -1211,8 +1211,8 @@ void DlOpenOatFile::PreLoad() { #else // Count the entries in dl_iterate_phdr we get at this point in time. 
struct dl_iterate_context { - static int callback(dl_phdr_info* info ATTRIBUTE_UNUSED, - size_t size ATTRIBUTE_UNUSED, + static int callback([[maybe_unused]] dl_phdr_info* info, + [[maybe_unused]] size_t size, void* data) { reinterpret_cast<dl_iterate_context*>(data)->count++; return 0; // Continue iteration. @@ -1335,7 +1335,7 @@ bool DlOpenOatFile::Dlopen(const std::string& elf_filename, if (reservation != nullptr && dlopen_handle_ != nullptr) { // Find used pages from the reservation. struct dl_iterate_context { - static int callback(dl_phdr_info* info, size_t size ATTRIBUTE_UNUSED, void* data) { + static int callback(dl_phdr_info* info, [[maybe_unused]] size_t size, void* data) { auto* context = reinterpret_cast<dl_iterate_context*>(data); static_assert(std::is_same<Elf32_Half, Elf64_Half>::value, "Half must match"); using Elf_Half = Elf64_Half; @@ -1433,7 +1433,7 @@ void DlOpenOatFile::PreSetup(const std::string& elf_filename) { size_t memsz; }; struct dl_iterate_context { - static int callback(dl_phdr_info* info, size_t size ATTRIBUTE_UNUSED, void* data) { + static int callback(dl_phdr_info* info, [[maybe_unused]] size_t size, void* data) { auto* context = reinterpret_cast<dl_iterate_context*>(data); static_assert(std::is_same<Elf32_Half, Elf64_Half>::value, "Half must match"); using Elf_Half = Elf64_Half; @@ -1597,8 +1597,7 @@ class ElfOatFile final : public OatFileBase { /*inout*/MemMap* reservation, // Where to load if not null. /*out*/std::string* error_msg) override; - void PreSetup(const std::string& elf_filename ATTRIBUTE_UNUSED) override { - } + void PreSetup([[maybe_unused]] const std::string& elf_filename) override {} private: bool ElfFileOpen(File* file, @@ -1853,29 +1852,29 @@ class OatFileBackedByVdex final : public OatFileBase { protected: void PreLoad() override {} - bool Load(const std::string& elf_filename ATTRIBUTE_UNUSED, - bool writable ATTRIBUTE_UNUSED, - bool executable ATTRIBUTE_UNUSED, - bool low_4gb ATTRIBUTE_UNUSED, - MemMap* reservation ATTRIBUTE_UNUSED, - std::string* error_msg ATTRIBUTE_UNUSED) override { + bool Load([[maybe_unused]] const std::string& elf_filename, + [[maybe_unused]] bool writable, + [[maybe_unused]] bool executable, + [[maybe_unused]] bool low_4gb, + [[maybe_unused]] MemMap* reservation, + [[maybe_unused]] std::string* error_msg) override { LOG(FATAL) << "Unsupported"; UNREACHABLE(); } - bool Load(int oat_fd ATTRIBUTE_UNUSED, - bool writable ATTRIBUTE_UNUSED, - bool executable ATTRIBUTE_UNUSED, - bool low_4gb ATTRIBUTE_UNUSED, - MemMap* reservation ATTRIBUTE_UNUSED, - std::string* error_msg ATTRIBUTE_UNUSED) override { + bool Load([[maybe_unused]] int oat_fd, + [[maybe_unused]] bool writable, + [[maybe_unused]] bool executable, + [[maybe_unused]] bool low_4gb, + [[maybe_unused]] MemMap* reservation, + [[maybe_unused]] std::string* error_msg) override { LOG(FATAL) << "Unsupported"; UNREACHABLE(); } - void PreSetup(const std::string& elf_filename ATTRIBUTE_UNUSED) override {} + void PreSetup([[maybe_unused]] const std::string& elf_filename) override {} - const uint8_t* FindDynamicSymbolAddress(const std::string& symbol_name ATTRIBUTE_UNUSED, + const uint8_t* FindDynamicSymbolAddress([[maybe_unused]] const std::string& symbol_name, std::string* error_msg) const override { *error_msg = "Unsupported"; return nullptr; diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc index 56d4c70920..1a5c57f902 100644 --- a/runtime/oat_file_assistant_test.cc +++ b/runtime/oat_file_assistant_test.cc @@ -1504,7 +1504,7 @@ 
class RaceGenerateTask : public Task { lock_(lock), loaded_oat_file_(nullptr) {} - void Run(Thread* self ATTRIBUTE_UNUSED) override { + void Run([[maybe_unused]] Thread* self) override { // Load the dex files, and save a pointer to the loaded oat file, so that // we can verify only one oat file was loaded for the dex location. std::vector<std::unique_ptr<const DexFile>> dex_files; diff --git a/runtime/runtime_callbacks_test.cc b/runtime/runtime_callbacks_test.cc index 6f0b8a1d20..2ba73cf47a 100644 --- a/runtime/runtime_callbacks_test.cc +++ b/runtime/runtime_callbacks_test.cc @@ -88,7 +88,7 @@ class RuntimeCallbacksTest : public CommonRuntimeTest { class ThreadLifecycleCallbackRuntimeCallbacksTest : public RuntimeCallbacksTest { public: - static void* PthreadsCallback(void* arg ATTRIBUTE_UNUSED) { + static void* PthreadsCallback([[maybe_unused]] void* arg) { // Attach. Runtime* runtime = Runtime::Current(); CHECK(runtime->AttachCurrentThread("ThreadLifecycle test thread", true, nullptr, false)); @@ -260,12 +260,12 @@ class ClassLoadCallbackRuntimeCallbacksTest : public RuntimeCallbacksTest { struct Callback : public ClassLoadCallback { void ClassPreDefine(const char* descriptor, - Handle<mirror::Class> klass ATTRIBUTE_UNUSED, - Handle<mirror::ClassLoader> class_loader ATTRIBUTE_UNUSED, + [[maybe_unused]] Handle<mirror::Class> klass, + [[maybe_unused]] Handle<mirror::ClassLoader> class_loader, const DexFile& initial_dex_file, - const dex::ClassDef& initial_class_def ATTRIBUTE_UNUSED, - /*out*/DexFile const** final_dex_file ATTRIBUTE_UNUSED, - /*out*/dex::ClassDef const** final_class_def ATTRIBUTE_UNUSED) override + [[maybe_unused]] const dex::ClassDef& initial_class_def, + [[maybe_unused]] /*out*/ DexFile const** final_dex_file, + [[maybe_unused]] /*out*/ dex::ClassDef const** final_class_def) override REQUIRES_SHARED(Locks::mutator_lock_) { const std::string& location = initial_dex_file.GetLocation(); std::string event = @@ -468,20 +468,20 @@ class MonitorWaitCallbacksTest : public RuntimeCallbacksTest { ref_ = { &k->GetDexFile(), k->GetDexClassDefIndex() }; } - void MonitorContendedLocking(Monitor* mon ATTRIBUTE_UNUSED) override - REQUIRES_SHARED(Locks::mutator_lock_) { } + void MonitorContendedLocking([[maybe_unused]] Monitor* mon) override + REQUIRES_SHARED(Locks::mutator_lock_) {} - void MonitorContendedLocked(Monitor* mon ATTRIBUTE_UNUSED) override - REQUIRES_SHARED(Locks::mutator_lock_) { } + void MonitorContendedLocked([[maybe_unused]] Monitor* mon) override + REQUIRES_SHARED(Locks::mutator_lock_) {} - void ObjectWaitStart(Handle<mirror::Object> obj, int64_t millis ATTRIBUTE_UNUSED) override + void ObjectWaitStart(Handle<mirror::Object> obj, [[maybe_unused]] int64_t millis) override REQUIRES_SHARED(Locks::mutator_lock_) { if (IsInterestingObject(obj.Get())) { saw_wait_start_ = true; } } - void MonitorWaitFinished(Monitor* m, bool timed_out ATTRIBUTE_UNUSED) override + void MonitorWaitFinished(Monitor* m, [[maybe_unused]] bool timed_out) override REQUIRES_SHARED(Locks::mutator_lock_) { if (IsInterestingObject(m->GetObject())) { saw_wait_finished_ = true; diff --git a/runtime/runtime_image.cc b/runtime/runtime_image.cc index f41d4c97f4..9be1d5eff5 100644 --- a/runtime/runtime_image.cc +++ b/runtime/runtime_image.cc @@ -668,7 +668,7 @@ class RuntimeImageHelper { explicit NativePointerVisitor(RuntimeImageHelper* helper) : helper_(helper) {} template <typename T> - T* operator()(T* ptr, void** dest_addr ATTRIBUTE_UNUSED) const { + T* operator()(T* ptr, [[maybe_unused]] void** dest_addr) 
const { return helper_->NativeLocationInImage(ptr, /* must_have_relocation= */ true); } @@ -1186,11 +1186,11 @@ class RuntimeImageHelper { : image_(image), copy_offset_(copy_offset) {} // We do not visit native roots. These are handled with other logic. - void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) - const { + void VisitRootIfNonNull( + [[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const { LOG(FATAL) << "UNREACHABLE"; } - void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const { + void VisitRoot([[maybe_unused]] mirror::CompressedReference<mirror::Object>* root) const { LOG(FATAL) << "UNREACHABLE"; } @@ -1209,9 +1209,8 @@ class RuntimeImageHelper { } // java.lang.ref.Reference visitor. - void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, - ObjPtr<mirror::Reference> ref) const - REQUIRES_SHARED(Locks::mutator_lock_) { + void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, + ObjPtr<mirror::Reference> ref) const REQUIRES_SHARED(Locks::mutator_lock_) { operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false); } diff --git a/runtime/startup_completed_task.cc b/runtime/startup_completed_task.cc index 9358d48acc..a9a06bb0e1 100644 --- a/runtime/startup_completed_task.cc +++ b/runtime/startup_completed_task.cc @@ -82,7 +82,7 @@ void StartupCompletedTask::Run(Thread* self) { // - accessing the image space metadata section when we madvise it // - accessing dex caches when we free them static struct EmptyClosure : Closure { - void Run(Thread* thread ATTRIBUTE_UNUSED) override {} + void Run([[maybe_unused]] Thread* thread) override {} } closure; runtime->GetThreadList()->RunCheckpoint(&closure); diff --git a/runtime/string_builder_append.cc b/runtime/string_builder_append.cc index 0083b912a5..2071733a1e 100644 --- a/runtime/string_builder_append.cc +++ b/runtime/string_builder_append.cc @@ -492,7 +492,7 @@ inline void StringBuilderAppend::Builder::StoreData(ObjPtr<mirror::String> new_s } inline void StringBuilderAppend::Builder::operator()(ObjPtr<mirror::Object> obj, - size_t usable_size ATTRIBUTE_UNUSED) const { + [[maybe_unused]] size_t usable_size) const { ObjPtr<mirror::String> new_string = ObjPtr<mirror::String>::DownCast(obj); new_string->SetCount(length_with_flag_); if (mirror::String::IsCompressed(length_with_flag_)) { diff --git a/runtime/subtype_check_test.cc b/runtime/subtype_check_test.cc index 719e5d917c..5960bcc29d 100644 --- a/runtime/subtype_check_test.cc +++ b/runtime/subtype_check_test.cc @@ -89,8 +89,8 @@ struct MockClass { bool CasField32(art::MemberOffset offset, int32_t old_value, int32_t new_value, - CASMode mode ATTRIBUTE_UNUSED, - std::memory_order memory_order ATTRIBUTE_UNUSED) + [[maybe_unused]] CASMode mode, + [[maybe_unused]] std::memory_order memory_order) REQUIRES_SHARED(Locks::mutator_lock_) { UNUSED(offset); if (old_value == GetField32Volatile(offset)) { diff --git a/runtime/thread.cc b/runtime/thread.cc index 6b1934c86e..00a1468b2c 100644 --- a/runtime/thread.cc +++ b/runtime/thread.cc @@ -810,7 +810,7 @@ void Thread::InstallImplicitProtection() { // Keep space uninitialized as it can overflow the stack otherwise (should Clang actually // auto-initialize this local variable). volatile char space[kPageSize - (kAsanMultiplier * 256)] __attribute__((uninitialized)); - char sink ATTRIBUTE_UNUSED = space[zero]; // NOLINT + [[maybe_unused]] char sink = space[zero]; // Remove tag from the pointer. Nop in non-hwasan builds. 
uintptr_t addr = reinterpret_cast<uintptr_t>( __hwasan_tag_pointer != nullptr ? __hwasan_tag_pointer(space, 0) : space); @@ -2148,8 +2148,7 @@ struct StackDumpVisitor : public MonitorObjectsStackVisitor { static constexpr size_t kMaxRepetition = 3u; - VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr ATTRIBUTE_UNUSED) - override + VisitMethodResult StartMethod(ArtMethod* m, [[maybe_unused]] size_t frame_nr) override REQUIRES_SHARED(Locks::mutator_lock_) { m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize); ObjPtr<mirror::DexCache> dex_cache = m->GetDexCache(); @@ -2194,12 +2193,11 @@ struct StackDumpVisitor : public MonitorObjectsStackVisitor { return VisitMethodResult::kContinueMethod; } - VisitMethodResult EndMethod(ArtMethod* m ATTRIBUTE_UNUSED) override { + VisitMethodResult EndMethod([[maybe_unused]] ArtMethod* m) override { return VisitMethodResult::kContinueMethod; } - void VisitWaitingObject(ObjPtr<mirror::Object> obj, ThreadState state ATTRIBUTE_UNUSED) - override + void VisitWaitingObject(ObjPtr<mirror::Object> obj, [[maybe_unused]] ThreadState state) override REQUIRES_SHARED(Locks::mutator_lock_) { PrintObject(obj, " - waiting on ", ThreadList::kInvalidThreadId); } @@ -2531,8 +2529,8 @@ class MonitorExitVisitor : public SingleRootVisitor { explicit MonitorExitVisitor(Thread* self) : self_(self) { } // NO_THREAD_SAFETY_ANALYSIS due to MonitorExit. - void VisitRoot(mirror::Object* entered_monitor, const RootInfo& info ATTRIBUTE_UNUSED) - override NO_THREAD_SAFETY_ANALYSIS { + void VisitRoot(mirror::Object* entered_monitor, + [[maybe_unused]] const RootInfo& info) override NO_THREAD_SAFETY_ANALYSIS { if (self_->HoldsLock(entered_monitor)) { LOG(WARNING) << "Calling MonitorExit on object " << entered_monitor << " (" << entered_monitor->PrettyTypeOf() << ")" @@ -3345,8 +3343,7 @@ jobjectArray Thread::CreateAnnotatedStackTrace(const ScopedObjectAccessAlreadyRu soaa_(soaa_in) {} protected: - VisitMethodResult StartMethod(ArtMethod* m, size_t frame_nr ATTRIBUTE_UNUSED) - override + VisitMethodResult StartMethod(ArtMethod* m, [[maybe_unused]] size_t frame_nr) override REQUIRES_SHARED(Locks::mutator_lock_) { ObjPtr<mirror::StackTraceElement> obj = CreateStackTraceElement( soaa_, m, GetDexPc(/* abort on error */ false)); @@ -3357,7 +3354,7 @@ jobjectArray Thread::CreateAnnotatedStackTrace(const ScopedObjectAccessAlreadyRu return VisitMethodResult::kContinueMethod; } - VisitMethodResult EndMethod(ArtMethod* m ATTRIBUTE_UNUSED) override { + VisitMethodResult EndMethod([[maybe_unused]] ArtMethod* m) override { lock_objects_.push_back({}); lock_objects_[lock_objects_.size() - 1].swap(frame_lock_objects_); @@ -3366,8 +3363,7 @@ jobjectArray Thread::CreateAnnotatedStackTrace(const ScopedObjectAccessAlreadyRu return VisitMethodResult::kContinueMethod; } - void VisitWaitingObject(ObjPtr<mirror::Object> obj, ThreadState state ATTRIBUTE_UNUSED) - override + void VisitWaitingObject(ObjPtr<mirror::Object> obj, [[maybe_unused]] ThreadState state) override REQUIRES_SHARED(Locks::mutator_lock_) { wait_jobject_.reset(soaa_.AddLocalReference<jobject>(obj)); } @@ -3377,9 +3373,8 @@ jobjectArray Thread::CreateAnnotatedStackTrace(const ScopedObjectAccessAlreadyRu wait_jobject_.reset(soaa_.AddLocalReference<jobject>(obj)); } void VisitBlockedOnObject(ObjPtr<mirror::Object> obj, - ThreadState state ATTRIBUTE_UNUSED, - uint32_t owner_tid ATTRIBUTE_UNUSED) - override + [[maybe_unused]] ThreadState state, + [[maybe_unused]] uint32_t owner_tid) override REQUIRES_SHARED(Locks::mutator_lock_) { 
block_jobject_.reset(soaa_.AddLocalReference<jobject>(obj)); } @@ -4271,26 +4266,23 @@ class ReferenceMapVisitor : public StackVisitor { void VisitQuickFrameNonPrecise() REQUIRES_SHARED(Locks::mutator_lock_) { struct UndefinedVRegInfo { - UndefinedVRegInfo(ArtMethod* method ATTRIBUTE_UNUSED, - const CodeInfo& code_info ATTRIBUTE_UNUSED, - const StackMap& map ATTRIBUTE_UNUSED, + UndefinedVRegInfo([[maybe_unused]] ArtMethod* method, + [[maybe_unused]] const CodeInfo& code_info, + [[maybe_unused]] const StackMap& map, RootVisitor& _visitor) - : visitor(_visitor) { - } + : visitor(_visitor) {} ALWAYS_INLINE void VisitStack(mirror::Object** ref, - size_t stack_index ATTRIBUTE_UNUSED, - const StackVisitor* stack_visitor) - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] size_t stack_index, + const StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) { visitor(ref, JavaFrameRootInfo::kImpreciseVreg, stack_visitor); } ALWAYS_INLINE void VisitRegister(mirror::Object** ref, - size_t register_index ATTRIBUTE_UNUSED, - const StackVisitor* stack_visitor) - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] size_t register_index, + const StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) { visitor(ref, JavaFrameRootInfo::kImpreciseVreg, stack_visitor); } @@ -4541,8 +4533,8 @@ void Thread::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) { class VerifyRootVisitor : public SingleRootVisitor { public: - void VisitRoot(mirror::Object* root, const RootInfo& info ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(Locks::mutator_lock_) { + void VisitRoot(mirror::Object* root, [[maybe_unused]] const RootInfo& info) override + REQUIRES_SHARED(Locks::mutator_lock_) { VerifyObject(root); } }; diff --git a/runtime/trace.cc b/runtime/trace.cc index 2e9f998d74..9045f50f21 100644 --- a/runtime/trace.cc +++ b/runtime/trace.cc @@ -349,7 +349,7 @@ static void GetSample(Thread* thread, void* arg) REQUIRES_SHARED(Locks::mutator_ the_trace->CompareAndUpdateStackTrace(thread, stack_trace); } -static void ClearThreadStackTraceAndClockBase(Thread* thread, void* arg ATTRIBUTE_UNUSED) { +static void ClearThreadStackTraceAndClockBase(Thread* thread, [[maybe_unused]] void* arg) { thread->SetTraceClockBase(0); std::vector<ArtMethod*>* stack_trace = thread->GetStackTraceSample(); thread->SetStackTraceSample(nullptr); @@ -489,7 +489,7 @@ void Trace::Start(std::unique_ptr<File>&& trace_file_in, auto deleter = [](File* file) { if (file != nullptr) { file->MarkUnchecked(); // Don't deal with flushing requirements. - int result ATTRIBUTE_UNUSED = file->Close(); + [[maybe_unused]] int result = file->Close(); delete file; } }; @@ -916,8 +916,8 @@ void Trace::FinishTracing() { } } -void Trace::DexPcMoved(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, +void Trace::DexPcMoved([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, ArtMethod* method, uint32_t new_dex_pc) { // We're not recorded to listen to this kind of event, so complain. 
@@ -925,23 +925,22 @@ void Trace::DexPcMoved(Thread* thread ATTRIBUTE_UNUSED, << " " << new_dex_pc; } -void Trace::FieldRead(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, +void Trace::FieldRead([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, ArtMethod* method, uint32_t dex_pc, - ArtField* field ATTRIBUTE_UNUSED) - REQUIRES_SHARED(Locks::mutator_lock_) { + [[maybe_unused]] ArtField* field) REQUIRES_SHARED(Locks::mutator_lock_) { // We're not recorded to listen to this kind of event, so complain. LOG(ERROR) << "Unexpected field read event in tracing " << ArtMethod::PrettyMethod(method) << " " << dex_pc; } -void Trace::FieldWritten(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Object> this_object ATTRIBUTE_UNUSED, +void Trace::FieldWritten([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Object> this_object, ArtMethod* method, uint32_t dex_pc, - ArtField* field ATTRIBUTE_UNUSED, - const JValue& field_value ATTRIBUTE_UNUSED) + [[maybe_unused]] ArtField* field, + [[maybe_unused]] const JValue& field_value) REQUIRES_SHARED(Locks::mutator_lock_) { // We're not recorded to listen to this kind of event, so complain. LOG(ERROR) << "Unexpected field write event in tracing " << ArtMethod::PrettyMethod(method) @@ -957,31 +956,29 @@ void Trace::MethodEntered(Thread* thread, ArtMethod* method) { void Trace::MethodExited(Thread* thread, ArtMethod* method, - instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED, - JValue& return_value ATTRIBUTE_UNUSED) { + [[maybe_unused]] instrumentation::OptionalFrame frame, + [[maybe_unused]] JValue& return_value) { uint32_t thread_clock_diff = 0; uint64_t timestamp_counter = 0; ReadClocks(thread, &thread_clock_diff, &timestamp_counter); LogMethodTraceEvent(thread, method, kTraceMethodExit, thread_clock_diff, timestamp_counter); } -void Trace::MethodUnwind(Thread* thread, - ArtMethod* method, - uint32_t dex_pc ATTRIBUTE_UNUSED) { +void Trace::MethodUnwind(Thread* thread, ArtMethod* method, [[maybe_unused]] uint32_t dex_pc) { uint32_t thread_clock_diff = 0; uint64_t timestamp_counter = 0; ReadClocks(thread, &thread_clock_diff, &timestamp_counter); LogMethodTraceEvent(thread, method, kTraceUnroll, thread_clock_diff, timestamp_counter); } -void Trace::ExceptionThrown(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Throwable> exception_object ATTRIBUTE_UNUSED) +void Trace::ExceptionThrown([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Throwable> exception_object) REQUIRES_SHARED(Locks::mutator_lock_) { LOG(ERROR) << "Unexpected exception thrown event in tracing"; } -void Trace::ExceptionHandled(Thread* thread ATTRIBUTE_UNUSED, - Handle<mirror::Throwable> exception_object ATTRIBUTE_UNUSED) +void Trace::ExceptionHandled([[maybe_unused]] Thread* thread, + [[maybe_unused]] Handle<mirror::Throwable> exception_object) REQUIRES_SHARED(Locks::mutator_lock_) { LOG(ERROR) << "Unexpected exception thrown event in tracing"; } @@ -992,8 +989,8 @@ void Trace::Branch(Thread* /*thread*/, ArtMethod* method, LOG(ERROR) << "Unexpected branch event in tracing" << ArtMethod::PrettyMethod(method); } -void Trace::WatchedFramePop(Thread* self ATTRIBUTE_UNUSED, - const ShadowFrame& frame ATTRIBUTE_UNUSED) { +void Trace::WatchedFramePop([[maybe_unused]] Thread* self, + [[maybe_unused]] const ShadowFrame& frame) { LOG(ERROR) << "Unexpected WatchedFramePop event in tracing"; } diff --git a/runtime/vdex_file.cc b/runtime/vdex_file.cc index bdead55390..d55876da2f 100644 --- 
a/runtime/vdex_file.cc +++ b/runtime/vdex_file.cc @@ -57,7 +57,7 @@ bool VdexFile::VdexFileHeader::IsVdexVersionValid() const { return (memcmp(vdex_version_, kVdexVersion, sizeof(kVdexVersion)) == 0); } -VdexFile::VdexFileHeader::VdexFileHeader(bool has_dex_section ATTRIBUTE_UNUSED) +VdexFile::VdexFileHeader::VdexFileHeader([[maybe_unused]] bool has_dex_section) : number_of_sections_(static_cast<uint32_t>(VdexSection::kNumberOfSections)) { memcpy(magic_, kVdexMagic, sizeof(kVdexMagic)); memcpy(vdex_version_, kVdexVersion, sizeof(kVdexVersion)); diff --git a/runtime/verifier/reg_type.h b/runtime/verifier/reg_type.h index 965bbaf080..c13784c75f 100644 --- a/runtime/verifier/reg_type.h +++ b/runtime/verifier/reg_type.h @@ -312,7 +312,7 @@ class RegType { cache_id_(cache_id) {} template <typename Class> - void CheckConstructorInvariants(Class* this_ ATTRIBUTE_UNUSED) const + void CheckConstructorInvariants([[maybe_unused]] Class* this_) const REQUIRES_SHARED(Locks::mutator_lock_) { static_assert(std::is_final<Class>::value, "Class must be final."); if (kIsDebugBuild) { diff --git a/runtime/write_barrier-inl.h b/runtime/write_barrier-inl.h index af8c1be828..ee6b336f05 100644 --- a/runtime/write_barrier-inl.h +++ b/runtime/write_barrier-inl.h @@ -28,7 +28,7 @@ namespace art { template <WriteBarrier::NullCheck kNullCheck> inline void WriteBarrier::ForFieldWrite(ObjPtr<mirror::Object> dst, - MemberOffset offset ATTRIBUTE_UNUSED, + [[maybe_unused]] MemberOffset offset, ObjPtr<mirror::Object> new_value) { if (kNullCheck == kWithNullCheck && new_value == nullptr) { return; @@ -38,8 +38,8 @@ inline void WriteBarrier::ForFieldWrite(ObjPtr<mirror::Object> dst, } inline void WriteBarrier::ForArrayWrite(ObjPtr<mirror::Object> dst, - int start_offset ATTRIBUTE_UNUSED, - size_t length ATTRIBUTE_UNUSED) { + [[maybe_unused]] int start_offset, + [[maybe_unused]] size_t length) { GetCardTable()->MarkCard(dst.Ptr()); } diff --git a/runtime/write_barrier.h b/runtime/write_barrier.h index 112154e14a..8080b0db65 100644 --- a/runtime/write_barrier.h +++ b/runtime/write_barrier.h @@ -38,15 +38,15 @@ class WriteBarrier { // safe-point. The call is not needed if null is stored in the field. template <NullCheck kNullCheck = kWithNullCheck> ALWAYS_INLINE static void ForFieldWrite(ObjPtr<mirror::Object> dst, - MemberOffset offset ATTRIBUTE_UNUSED, - ObjPtr<mirror::Object> new_value ATTRIBUTE_UNUSED) + [[maybe_unused]] MemberOffset offset, + [[maybe_unused]] ObjPtr<mirror::Object> new_value) REQUIRES_SHARED(Locks::mutator_lock_); // Must be called if a reference field of an ObjectArray in the heap changes, and before any GC // safe-point. The call is not needed if null is stored in the field. ALWAYS_INLINE static void ForArrayWrite(ObjPtr<mirror::Object> dst, - int start_offset ATTRIBUTE_UNUSED, - size_t length ATTRIBUTE_UNUSED) + [[maybe_unused]] int start_offset, + [[maybe_unused]] size_t length) REQUIRES_SHARED(Locks::mutator_lock_); // Write barrier for every reference field in an object. diff --git a/sigchainlib/sigchain_fake.cc b/sigchainlib/sigchain_fake.cc index 23861540fc..0e62eebf8c 100644 --- a/sigchainlib/sigchain_fake.cc +++ b/sigchainlib/sigchain_fake.cc @@ -20,8 +20,6 @@ #include "log.h" #include "sigchain.h" -#define ATTRIBUTE_UNUSED __attribute__((__unused__)) - // We cannot annotate the declarations, as they are not no-return in the non-fake version. 
#pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunknown-pragmas" @@ -29,24 +27,24 @@ namespace art { -extern "C" void EnsureFrontOfChain(int signal ATTRIBUTE_UNUSED) { +extern "C" void EnsureFrontOfChain([[maybe_unused]] int signal) { log("EnsureFrontOfChain is not exported by the main executable."); abort(); } -extern "C" void AddSpecialSignalHandlerFn(int signal ATTRIBUTE_UNUSED, - SigchainAction* sa ATTRIBUTE_UNUSED) { +extern "C" void AddSpecialSignalHandlerFn([[maybe_unused]] int signal, + [[maybe_unused]] SigchainAction* sa) { log("SetSpecialSignalHandlerFn is not exported by the main executable."); abort(); } -extern "C" void RemoveSpecialSignalHandlerFn(int signal ATTRIBUTE_UNUSED, - bool (*fn)(int, siginfo_t*, void*) ATTRIBUTE_UNUSED) { +extern "C" void RemoveSpecialSignalHandlerFn([[maybe_unused]] int signal, + [[maybe_unused]] bool (*fn)(int, siginfo_t*, void*)) { log("SetSpecialSignalHandlerFn is not exported by the main executable."); abort(); } -extern "C" void SkipAddSignalHandler(bool value ATTRIBUTE_UNUSED) { +extern "C" void SkipAddSignalHandler([[maybe_unused]] bool value) { log("SkipAddSignalHandler is not exported by the main executable."); abort(); } diff --git a/sigchainlib/sigchain_test.cc b/sigchainlib/sigchain_test.cc index 5e9c7fe7cb..d8ff4d58a2 100644 --- a/sigchainlib/sigchain_test.cc +++ b/sigchainlib/sigchain_test.cc @@ -267,8 +267,8 @@ DISABLE_HWASAN void fault_address_tag_impl() { ASSERT_EQ(0, sigaction(SIGSEGV, &action, nullptr)); auto* tagged_null = reinterpret_cast<int*>(0x2bULL << 56); - EXPECT_EXIT({ volatile int load __attribute__((unused)) = *tagged_null; }, - testing::ExitedWithCode(0), ""); + EXPECT_EXIT( + { [[maybe_unused]] volatile int load = *tagged_null; }, testing::ExitedWithCode(0), ""); // Our sigaction implementation always implements the "clear unknown bits" // semantics for oldact.sa_flags regardless of kernel version so we rely on it @@ -277,8 +277,9 @@ DISABLE_HWASAN void fault_address_tag_impl() { ASSERT_EQ(0, sigaction(SIGSEGV, &action, nullptr)); ASSERT_EQ(0, sigaction(SIGSEGV, nullptr, &action)); if (action.sa_flags & SA_EXPOSE_TAGBITS) { - EXPECT_EXIT({ volatile int load __attribute__((unused)) = *tagged_null; }, - testing::ExitedWithCode(0x2b), ""); + EXPECT_EXIT({ [[maybe_unused]] volatile int load = *tagged_null; }, + testing::ExitedWithCode(0x2b), + ""); } } #endif diff --git a/test/004-SignalTest/signaltest.cc b/test/004-SignalTest/signaltest.cc index 18b4502a3f..916fb57a66 100644 --- a/test/004-SignalTest/signaltest.cc +++ b/test/004-SignalTest/signaltest.cc @@ -53,15 +53,15 @@ static const int kMaxSignal = 1; #define BLOCKED_SIGNAL SIGUSR1 #define UNBLOCKED_SIGNAL SIGUSR2 -static void blocked_signal(int sig ATTRIBUTE_UNUSED) { +static void blocked_signal([[maybe_unused]] int sig) { printf("blocked signal received\n"); } -static void unblocked_signal(int sig ATTRIBUTE_UNUSED) { +static void unblocked_signal([[maybe_unused]] int sig) { printf("unblocked signal received\n"); } -static void signalhandler(int sig ATTRIBUTE_UNUSED, siginfo_t* info ATTRIBUTE_UNUSED, +static void signalhandler([[maybe_unused]] int sig, [[maybe_unused]] siginfo_t* info, void* context) { printf("signal caught\n"); ++signal_count; diff --git a/test/044-proxy/native_proxy.cc b/test/044-proxy/native_proxy.cc index f3178f9c2a..e86c5a8424 100644 --- a/test/044-proxy/native_proxy.cc +++ b/test/044-proxy/native_proxy.cc @@ -21,7 +21,7 @@ namespace art { extern "C" JNIEXPORT void JNICALL Java_NativeProxy_nativeCall( - JNIEnv* env, jclass clazz 
ATTRIBUTE_UNUSED, jobject inf_ref) { + JNIEnv* env, [[maybe_unused]] jclass clazz, jobject inf_ref) { jclass native_inf_class = env->FindClass("NativeInterface"); CHECK(native_inf_class != nullptr); jmethodID mid = env->GetMethodID(native_inf_class, "callback", "()V"); diff --git a/test/051-thread/thread_test.cc b/test/051-thread/thread_test.cc index 33841eba5a..6c7092396a 100644 --- a/test/051-thread/thread_test.cc +++ b/test/051-thread/thread_test.cc @@ -21,13 +21,12 @@ namespace art { extern "C" JNIEXPORT jint JNICALL Java_Main_getNativePriority(JNIEnv* env, - jclass clazz ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass clazz) { return Thread::ForEnv(env)->GetNativePriority(); } extern "C" JNIEXPORT jboolean JNICALL Java_Main_supportsThreadPriorities( - JNIEnv* env ATTRIBUTE_UNUSED, - jclass clazz ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass clazz) { #if defined(ART_TARGET_ANDROID) return JNI_TRUE; #else diff --git a/test/115-native-bridge/nativebridge.cc b/test/115-native-bridge/nativebridge.cc index 4388747362..65d7fcd315 100644 --- a/test/115-native-bridge/nativebridge.cc +++ b/test/115-native-bridge/nativebridge.cc @@ -185,9 +185,9 @@ static jchar trampoline_Java_Main_charMethod(JNIEnv* env, jclass klass, jchar c1 // This code is adapted from 004-SignalTest and causes a segfault. char *go_away_compiler = nullptr; -[[ noreturn ]] static void test_sigaction_handler(int sig ATTRIBUTE_UNUSED, - siginfo_t* info ATTRIBUTE_UNUSED, - void* context ATTRIBUTE_UNUSED) { +[[ noreturn ]] static void test_sigaction_handler([[maybe_unused]] int sig, + [[maybe_unused]] siginfo_t* info, + [[maybe_unused]] void* context) { printf("Should not reach the test sigaction handler."); abort(); } @@ -423,7 +423,7 @@ static NativeBridgeMethod* find_native_bridge_method(const char *name) { // NativeBridgeCallbacks implementations extern "C" bool native_bridge_initialize(const android::NativeBridgeRuntimeCallbacks* art_cbs, const char* app_code_cache_dir, - const char* isa ATTRIBUTE_UNUSED) { + [[maybe_unused]] const char* isa) { struct stat st; if (app_code_cache_dir != nullptr) { if (stat(app_code_cache_dir, &st) == 0) { @@ -471,7 +471,7 @@ extern "C" void* native_bridge_loadLibrary(const char* libpath, int flag) { } extern "C" void* native_bridge_getTrampoline(void* handle, const char* name, const char* shorty, - uint32_t len ATTRIBUTE_UNUSED) { + [[maybe_unused]] uint32_t len) { printf("Getting trampoline for %s with shorty %s.\n", name, shorty); // The name here is actually the JNI name, so we can directly do the lookup. @@ -532,7 +532,7 @@ extern "C" const struct android::NativeBridgeRuntimeValues* native_bridge_getApp // v2 parts. 
-extern "C" bool native_bridge_isCompatibleWith(uint32_t bridge_version ATTRIBUTE_UNUSED) { +extern "C" bool native_bridge_isCompatibleWith([[maybe_unused]] uint32_t bridge_version) { return true; } @@ -557,7 +557,7 @@ extern "C" bool native_bridge_isCompatibleWith(uint32_t bridge_version ATTRIBUTE #endif #endif -static bool StandardSignalHandler(int sig, siginfo_t* info ATTRIBUTE_UNUSED, void* context) { +static bool StandardSignalHandler(int sig, [[maybe_unused]] siginfo_t* info, void* context) { if (sig == SIGSEGV) { #if defined(__arm__) ucontext_t* uc = reinterpret_cast<ucontext_t*>(context); @@ -610,7 +610,7 @@ static ::android::NativeBridgeSignalHandlerFn native_bridge_getSignalHandler(int return nullptr; } -extern "C" int native_bridge_unloadLibrary(void* handle ATTRIBUTE_UNUSED) { +extern "C" int native_bridge_unloadLibrary([[maybe_unused]] void* handle) { printf("dlclose() in native bridge.\n"); return 0; } @@ -620,40 +620,43 @@ extern "C" const char* native_bridge_getError() { return ""; } -extern "C" bool native_bridge_isPathSupported(const char* library_path ATTRIBUTE_UNUSED) { +extern "C" bool native_bridge_isPathSupported([[maybe_unused]] const char* library_path) { printf("Checking for path support in native bridge.\n"); return false; } -extern "C" bool native_bridge_initAnonymousNamespace(const char* public_ns_sonames ATTRIBUTE_UNUSED, - const char* anon_ns_library_path ATTRIBUTE_UNUSED) { +extern "C" bool native_bridge_initAnonymousNamespace( + [[maybe_unused]] const char* public_ns_sonames, + [[maybe_unused]] const char* anon_ns_library_path) { printf("Initializing anonymous namespace in native bridge.\n"); return false; } extern "C" android::native_bridge_namespace_t* -native_bridge_createNamespace(const char* name ATTRIBUTE_UNUSED, - const char* ld_library_path ATTRIBUTE_UNUSED, - const char* default_library_path ATTRIBUTE_UNUSED, - uint64_t type ATTRIBUTE_UNUSED, - const char* permitted_when_isolated_path ATTRIBUTE_UNUSED, - android::native_bridge_namespace_t* parent_ns ATTRIBUTE_UNUSED) { +native_bridge_createNamespace([[maybe_unused]] const char* name, + [[maybe_unused]] const char* ld_library_path, + [[maybe_unused]] const char* default_library_path, + [[maybe_unused]] uint64_t type, + [[maybe_unused]] const char* permitted_when_isolated_path, + [[maybe_unused]] android::native_bridge_namespace_t* parent_ns) { printf("Creating namespace in native bridge.\n"); return nullptr; } -extern "C" bool native_bridge_linkNamespaces(android::native_bridge_namespace_t* from ATTRIBUTE_UNUSED, - android::native_bridge_namespace_t* to ATTRIBUTE_UNUSED, - const char* shared_libs_sonames ATTRIBUTE_UNUSED) { +extern "C" bool native_bridge_linkNamespaces( + [[maybe_unused]] android::native_bridge_namespace_t* from, + [[maybe_unused]] android::native_bridge_namespace_t* to, + [[maybe_unused]] const char* shared_libs_sonames) { printf("Linking namespaces in native bridge.\n"); return false; } -extern "C" void* native_bridge_loadLibraryExt(const char* libpath ATTRIBUTE_UNUSED, - int flag ATTRIBUTE_UNUSED, - android::native_bridge_namespace_t* ns ATTRIBUTE_UNUSED) { - printf("Loading library with Extension in native bridge.\n"); - return nullptr; +extern "C" void* native_bridge_loadLibraryExt( + [[maybe_unused]] const char* libpath, + [[maybe_unused]] int flag, + [[maybe_unused]] android::native_bridge_namespace_t* ns) { + printf("Loading library with Extension in native bridge.\n"); + return nullptr; } // "NativeBridgeItf" is effectively an API (it is the name of the symbol that will be 
loaded diff --git a/test/137-cfi/cfi.cc b/test/137-cfi/cfi.cc index cd5b4cf35b..29a6ff581a 100644 --- a/test/137-cfi/cfi.cc +++ b/test/137-cfi/cfi.cc @@ -212,7 +212,7 @@ static constexpr int kSleepTimeMicroseconds = 50000; // 0.05 seconds static constexpr int kMaxTotalSleepTimeMicroseconds = 10000000; // 10 seconds // Wait for a sigstop. This code is copied from libbacktrace. -int wait_for_sigstop(pid_t tid, int* total_sleep_time_usec, bool* detach_failed ATTRIBUTE_UNUSED) { +int wait_for_sigstop(pid_t tid, int* total_sleep_time_usec, [[maybe_unused]] bool* detach_failed) { for (;;) { int status; pid_t n = TEMP_FAILURE_RETRY(waitpid(tid, &status, __WALL | WNOHANG | WUNTRACED)); diff --git a/test/1919-vminit-thread-start-timing/vminit.cc b/test/1919-vminit-thread-start-timing/vminit.cc index ddf6649769..231896161d 100644 --- a/test/1919-vminit-thread-start-timing/vminit.cc +++ b/test/1919-vminit-thread-start-timing/vminit.cc @@ -65,7 +65,7 @@ static void JNICALL ThreadStartCallback(jvmtiEnv *jvmti, JNIEnv* env, jthread th static void JNICALL Test1919AgentThread(jvmtiEnv* jvmti, JNIEnv* env, - void* arg ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* arg) { EventList* list = nullptr; CheckJvmtiError(jvmti, jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&list))); CheckJvmtiError(jvmti, jvmti->RawMonitorEnter(list->events_mutex)); @@ -140,8 +140,8 @@ static void InstallEventList(jvmtiEnv* env) { } jint OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0) != 0) { printf("Unable to get jvmti env!\n"); return 1; diff --git a/test/1922-owned-monitors-info/owned_monitors.cc b/test/1922-owned-monitors-info/owned_monitors.cc index 66a83689a2..e95c914b43 100644 --- a/test/1922-owned-monitors-info/owned_monitors.cc +++ b/test/1922-owned-monitors-info/owned_monitors.cc @@ -68,7 +68,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test1922_00024Target_lockThisNative( } extern "C" JNIEXPORT void JNICALL Java_art_Test1922_00024Target_lockNative( - JNIEnv* env, jobject thiz ATTRIBUTE_UNUSED, jobject mon, jobject next) { + JNIEnv* env, [[maybe_unused]] jobject thiz, jobject mon, jobject next) { if (doMonitorEnter(env, mon)) { return; } diff --git a/test/1936-thread-end-events/method_trace.cc b/test/1936-thread-end-events/method_trace.cc index 019b6a9a24..edfff907a0 100644 --- a/test/1936-thread-end-events/method_trace.cc +++ b/test/1936-thread-end-events/method_trace.cc @@ -52,8 +52,8 @@ extern "C" JNIEXPORT jobject JNICALL Java_art_Test989_returnValueNative(JNIEnv* return env->CallStaticObjectMethod(klass, targetMethod); } -extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { return; } diff --git a/test/1945-proxy-method-arguments/get_args.cc b/test/1945-proxy-method-arguments/get_args.cc index 859e229d9e..5191761ca5 100644 --- a/test/1945-proxy-method-arguments/get_args.cc +++ b/test/1945-proxy-method-arguments/get_args.cc @@ -104,7 +104,7 @@ jobject GetProxyReferenceArgument(size_t arg_pos, size_t proxy_method_frame_dept } extern "C" JNIEXPORT jobject JNICALL Java_TestInvocationHandler_getArgument( - JNIEnv* env ATTRIBUTE_UNUSED, jobject thiz ATTRIBUTE_UNUSED, int arg_pos, int frame_depth) { + [[maybe_unused]] 
JNIEnv* env, [[maybe_unused]] jobject thiz, int arg_pos, int frame_depth) { return GetProxyReferenceArgument(arg_pos, frame_depth); } diff --git a/test/1950-unprepared-transform/unprepared_transform.cc b/test/1950-unprepared-transform/unprepared_transform.cc index 620ede887f..93c4b3e7b8 100644 --- a/test/1950-unprepared-transform/unprepared_transform.cc +++ b/test/1950-unprepared-transform/unprepared_transform.cc @@ -37,15 +37,15 @@ namespace Test1950UnpreparedTransform { jclass kMainClass = nullptr; jmethodID kPrepareFunc = nullptr; -extern "C" JNIEXPORT void ClassLoadCallback(jvmtiEnv* jvmti ATTRIBUTE_UNUSED, - JNIEnv* env, - jthread thr ATTRIBUTE_UNUSED, - jclass klass) { +extern "C" JNIEXPORT void ClassLoadCallback([[maybe_unused]] jvmtiEnv* jvmti, + JNIEnv* env, + [[maybe_unused]] jthread thr, + jclass klass) { env->CallStaticVoidMethod(kMainClass, kPrepareFunc, klass); } extern "C" JNIEXPORT void JNICALL Java_Main_clearClassLoadHook( - JNIEnv* env, jclass main ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass main, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->SetEventNotificationMode(JVMTI_DISABLE, diff --git a/test/1953-pop-frame/pop_frame.cc b/test/1953-pop-frame/pop_frame.cc index 86345d65dc..9b3af96e29 100644 --- a/test/1953-pop-frame/pop_frame.cc +++ b/test/1953-pop-frame/pop_frame.cc @@ -44,7 +44,7 @@ namespace Test1953PopFrame { extern "C" JNIEXPORT void JNICALL Java_art_Test1953_popFrame(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->PopFrame(thr)); } diff --git a/test/1957-error-ext/lasterror.cc b/test/1957-error-ext/lasterror.cc index 5aa3fbe9fb..41c5f13ffd 100644 --- a/test/1957-error-ext/lasterror.cc +++ b/test/1957-error-ext/lasterror.cc @@ -84,7 +84,7 @@ static jvmtiExtensionFunction FindExtensionMethod(JNIEnv* env, const std::string } extern "C" JNIEXPORT -jstring JNICALL Java_art_Test1957_getLastError(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { +jstring JNICALL Java_art_Test1957_getLastError(JNIEnv* env, [[maybe_unused]] jclass klass) { GetLastError get_last_error = reinterpret_cast<GetLastError>( FindExtensionMethod(env, "com.android.art.misc.get_last_error_message")); if (get_last_error == nullptr) { @@ -99,7 +99,7 @@ jstring JNICALL Java_art_Test1957_getLastError(JNIEnv* env, jclass klass ATTRIBU } extern "C" JNIEXPORT -void JNICALL Java_art_Test1957_clearLastError(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { +void JNICALL Java_art_Test1957_clearLastError(JNIEnv* env, [[maybe_unused]] jclass klass) { ClearLastError clear_last_error = reinterpret_cast<ClearLastError>( FindExtensionMethod(env, "com.android.art.misc.clear_last_error_message")); if (clear_last_error == nullptr) { diff --git a/test/1959-redefine-object-instrument/fake_redef_object.cc b/test/1959-redefine-object-instrument/fake_redef_object.cc index b1201abaa4..a5b6a7d366 100644 --- a/test/1959-redefine-object-instrument/fake_redef_object.cc +++ b/test/1959-redefine-object-instrument/fake_redef_object.cc @@ -39,10 +39,10 @@ namespace Test1959RedefineObjectInstrument { // Just pull it out of the dex file but don't bother changing anything. 
static void JNICALL RedefineObjectHook(jvmtiEnv *jvmti_env, JNIEnv* env, - jclass class_being_redefined ATTRIBUTE_UNUSED, - jobject loader ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass class_being_redefined, + [[maybe_unused]] jobject loader, const char* name, - jobject protection_domain ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject protection_domain, jint class_data_len, const unsigned char* class_data, jint* new_class_data_len, @@ -93,7 +93,7 @@ static void JNICALL RedefineObjectHook(jvmtiEnv *jvmti_env, } extern "C" JNIEXPORT void JNICALL Java_Main_forceRedefine(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jclass obj_class, jthread thr) { if (IsJVM()) { diff --git a/test/1962-multi-thread-events/multi_thread_events.cc b/test/1962-multi-thread-events/multi_thread_events.cc index aeb15b0c8c..f27640a96e 100644 --- a/test/1962-multi-thread-events/multi_thread_events.cc +++ b/test/1962-multi-thread-events/multi_thread_events.cc @@ -38,8 +38,8 @@ void cbMethodEntry(jvmtiEnv* jvmti, JNIEnv* env, jthread thread, jmethodID method, - jboolean was_exception ATTRIBUTE_UNUSED, - jvalue val ATTRIBUTE_UNUSED) { + [[maybe_unused]] jboolean was_exception, + [[maybe_unused]] jvalue val) { BreakpointData* data = nullptr; if (JvmtiErrorToException( env, jvmti, jvmti->GetThreadLocalStorage(thread, reinterpret_cast<void**>(&data)))) { @@ -56,7 +56,7 @@ void cbMethodEntry(jvmtiEnv* jvmti, } extern "C" JNIEXPORT void JNICALL Java_art_Test1962_setupTest(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass klass) { jvmtiCapabilities caps{ .can_generate_method_exit_events = 1, }; @@ -70,7 +70,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test1962_setupTest(JNIEnv* env, } extern "C" JNIEXPORT void JNICALL Java_art_Test1962_setupThread( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr, jobject events, jobject target) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr, jobject events, jobject target) { BreakpointData* data = nullptr; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->Allocate(sizeof(*data), reinterpret_cast<uint8_t**>(&data)))) { diff --git a/test/1972-jni-id-swap-indices/jni_id.cc b/test/1972-jni-id-swap-indices/jni_id.cc index 7de7131ca8..f3c2a62fc4 100644 --- a/test/1972-jni-id-swap-indices/jni_id.cc +++ b/test/1972-jni-id-swap-indices/jni_id.cc @@ -27,7 +27,7 @@ namespace art { extern "C" JNIEXPORT jlong JNICALL Java_Main_GetMethodId(JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, bool is_static, jclass target, jstring name, @@ -42,18 +42,18 @@ extern "C" JNIEXPORT jlong JNICALL Java_Main_GetMethodId(JNIEnv* env, return res; } -extern "C" JNIEXPORT jobject JNICALL Java_Main_GetJniType(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jobject JNICALL Java_Main_GetJniType(JNIEnv* env, [[maybe_unused]] jclass k) { std::ostringstream oss; oss << Runtime::Current()->GetJniIdType(); return env->NewStringUTF(oss.str().c_str()); } -extern "C" JNIEXPORT void JNICALL Java_Main_SetToPointerIds(JNIEnv* env ATTRIBUTE_UNUSED, - jclass k ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Java_Main_SetToPointerIds([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass k) { Runtime::Current()->SetJniIdType(JniIdType::kPointer); } -extern "C" JNIEXPORT void JNICALL Java_Main_SetToIndexIds(JNIEnv* env ATTRIBUTE_UNUSED, - jclass k ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Java_Main_SetToIndexIds([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass k) { 
Runtime::Current()->SetJniIdType(JniIdType::kIndices); } diff --git a/test/1974-resize-array/resize_array.cc b/test/1974-resize-array/resize_array.cc index 60037b8045..17468212c6 100644 --- a/test/1974-resize-array/resize_array.cc +++ b/test/1974-resize-array/resize_array.cc @@ -110,7 +110,7 @@ static jvmtiExtensionFunction FindExtensionMethod(JNIEnv* env, const std::string } extern "C" JNIEXPORT void JNICALL Java_art_Test1974_ResizeArray(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject ref_gen, jint new_size) { ChangeArraySize change_array_size = reinterpret_cast<ChangeArraySize>( @@ -125,24 +125,24 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test1974_ResizeArray(JNIEnv* env, } extern "C" JNIEXPORT jobject JNICALL Java_art_Test1974_ReadJniRef(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jlong r) { return env->NewLocalRef(reinterpret_cast<jobject>(static_cast<intptr_t>(r))); } extern "C" JNIEXPORT jlong JNICALL -Java_art_Test1974_GetWeakGlobalJniRef(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject r) { +Java_art_Test1974_GetWeakGlobalJniRef(JNIEnv* env, [[maybe_unused]] jclass klass, jobject r) { return static_cast<jlong>(reinterpret_cast<intptr_t>(env->NewWeakGlobalRef(r))); } extern "C" JNIEXPORT jlong JNICALL Java_art_Test1974_GetGlobalJniRef(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject r) { return static_cast<jlong>(reinterpret_cast<intptr_t>(env->NewGlobalRef(r))); } extern "C" JNIEXPORT jobjectArray JNICALL -Java_art_Test1974_GetObjectsWithTag(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) { +Java_art_Test1974_GetObjectsWithTag(JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) { jsize cnt = 0; jobject* res = nullptr; if (JvmtiErrorToException( @@ -161,7 +161,7 @@ Java_art_Test1974_GetObjectsWithTag(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, } extern "C" JNIEXPORT void JNICALL Java_art_Test1974_runNativeTest(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobjectArray arr, jobject resize, jobject print, @@ -181,7 +181,7 @@ struct JvmtiInfo { }; extern "C" JNIEXPORT void JNICALL Java_art_Test1974_StartCollectFrees(JNIEnv* env, - jclass k ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass k) { jvmtiEventCallbacks cb{ .ObjectFree = [](jvmtiEnv* jvmti, jlong tag) { @@ -208,14 +208,14 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test1974_StartCollectFrees(JNIEnv* en } extern "C" JNIEXPORT void JNICALL -Java_art_Test1974_StartAssignObsoleteIncrementedId(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) { +Java_art_Test1974_StartAssignObsoleteIncrementedId(JNIEnv* env, [[maybe_unused]] jclass k) { jint id = FindExtensionEvent(env, "com.android.art.heap.obsolete_object_created"); if (env->ExceptionCheck()) { LOG(INFO) << "Could not find extension event!"; return; } using ObsoleteEvent = void (*)(jvmtiEnv * env, jlong * obsolete, jlong * non_obsolete); - ObsoleteEvent oe = [](jvmtiEnv* env ATTRIBUTE_UNUSED, jlong* obsolete, jlong* non_obsolete) { + ObsoleteEvent oe = []([[maybe_unused]] jvmtiEnv* env, jlong* obsolete, jlong* non_obsolete) { *non_obsolete = *obsolete; *obsolete = *obsolete + 1; }; @@ -226,7 +226,7 @@ Java_art_Test1974_StartAssignObsoleteIncrementedId(JNIEnv* env, jclass k ATTRIBU } extern "C" JNIEXPORT void JNICALL -Java_art_Test1974_EndAssignObsoleteIncrementedId(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) { +Java_art_Test1974_EndAssignObsoleteIncrementedId(JNIEnv* env, [[maybe_unused]] jclass k) { jint id = FindExtensionEvent(env, 
"com.android.art.heap.obsolete_object_created"); if (env->ExceptionCheck()) { LOG(INFO) << "Could not find extension event!"; @@ -236,7 +236,7 @@ Java_art_Test1974_EndAssignObsoleteIncrementedId(JNIEnv* env, jclass k ATTRIBUTE } extern "C" JNIEXPORT jlongArray JNICALL -Java_art_Test1974_CollectFreedTags(JNIEnv* env, jclass k ATTRIBUTE_UNUSED) { +Java_art_Test1974_CollectFreedTags(JNIEnv* env, [[maybe_unused]] jclass k) { if (JvmtiErrorToException( env, jvmti_env, diff --git a/test/2005-pause-all-redefine-multithreaded/pause-all.cc b/test/2005-pause-all-redefine-multithreaded/pause-all.cc index 37d6c4d045..be0428d2cd 100644 --- a/test/2005-pause-all-redefine-multithreaded/pause-all.cc +++ b/test/2005-pause-all-redefine-multithreaded/pause-all.cc @@ -35,7 +35,7 @@ static constexpr jlong kRedefinedObjectTag = 0xDEADBEEF; extern "C" JNIEXPORT void JNICALL Java_art_Test2005_UpdateFieldValuesAndResumeThreads(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobjectArray threads_arr, jclass redefined_class, jobjectArray new_fields, @@ -54,10 +54,10 @@ Java_art_Test2005_UpdateFieldValuesAndResumeThreads(JNIEnv* env, CHECK_EQ(jvmti_env->IterateOverInstancesOfClass( redefined_class, JVMTI_HEAP_OBJECT_EITHER, - [](jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, + []([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, jlong* tag_ptr, - void* user_data ATTRIBUTE_UNUSED) -> jvmtiIterationControl { + [[maybe_unused]] void* user_data) -> jvmtiIterationControl { *tag_ptr = kRedefinedObjectTag; return JVMTI_ITERATION_CONTINUE; }, @@ -87,7 +87,7 @@ Java_art_Test2005_UpdateFieldValuesAndResumeThreads(JNIEnv* env, } extern "C" JNIEXPORT jobject JNICALL -Java_Main_fastNativeSleepAndReturnInteger42(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { +Java_Main_fastNativeSleepAndReturnInteger42(JNIEnv* env, [[maybe_unused]] jclass klass) { jclass integer_class = env->FindClass("java/lang/Integer"); CHECK(integer_class != nullptr); jmethodID integer_value_of = diff --git a/test/2009-structural-local-ref/local-ref.cc b/test/2009-structural-local-ref/local-ref.cc index 9f6ef0b25a..5bd32872bb 100644 --- a/test/2009-structural-local-ref/local-ref.cc +++ b/test/2009-structural-local-ref/local-ref.cc @@ -32,7 +32,7 @@ namespace art { namespace Test2009StructuralLocalRef { extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalCallStatic( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) { jclass obj_klass = env->GetObjectClass(obj); jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V"); env->CallVoidMethod(thnk, run_meth); @@ -46,7 +46,7 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalCallStatic( } extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalCallVirtual( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) { jclass obj_klass = env->GetObjectClass(obj); jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V"); env->CallVoidMethod(thnk, run_meth); @@ -58,7 +58,7 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalCallVirtual( } } extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalGetIField( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, 
jobject thnk) { jclass obj_klass = env->GetObjectClass(obj); jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V"); env->CallVoidMethod(thnk, run_meth); @@ -71,7 +71,7 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalGetIField( } } extern "C" JNIEXPORT jstring JNICALL Java_art_Test2009_NativeLocalGetSField( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jobject thnk) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jobject thnk) { jclass obj_klass = env->GetObjectClass(obj); jmethodID run_meth = env->GetMethodID(env->FindClass("java/lang/Runnable"), "run", "()V"); env->CallVoidMethod(thnk, run_meth); diff --git a/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc b/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc index 4b3dac9a08..cd740aa57f 100644 --- a/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc +++ b/test/2012-structural-redefinition-failures-jni-id/set-jni-id-used.cc @@ -36,7 +36,7 @@ namespace art { namespace Test2012SetJniIdUsed { extern "C" JNIEXPORT void JNICALL Java_Main_SetPointerIdsUsed( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass target) { + JNIEnv* env, [[maybe_unused]] jclass klass, jclass target) { ScopedObjectAccess soa(env); StackHandleScope<1> hs(soa.Self()); Handle<mirror::Class> h(hs.NewHandle(soa.Decode<mirror::Class>(target))); diff --git a/test/2033-shutdown-mechanics/native_shutdown.cc b/test/2033-shutdown-mechanics/native_shutdown.cc index 2b7546a402..9cfc9897dc 100644 --- a/test/2033-shutdown-mechanics/native_shutdown.cc +++ b/test/2033-shutdown-mechanics/native_shutdown.cc @@ -34,7 +34,7 @@ static void MaybePrintTime() { extern "C" [[noreturn]] JNIEXPORT void JNICALL Java_Main_monitorShutdown( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { bool found_shutdown = false; bool found_runtime_deleted = false; JNIEnvExt* const extEnv = down_cast<JNIEnvExt*>(env); diff --git a/test/2035-structural-native-method/structural-native.cc b/test/2035-structural-native-method/structural-native.cc index bf51c8b3a1..a47e91c7f5 100644 --- a/test/2035-structural-native-method/structural-native.cc +++ b/test/2035-structural-native-method/structural-native.cc @@ -31,12 +31,12 @@ namespace art { namespace Test2035StructuralNativeMethod { -jlong JNICALL TransformNativeMethod(JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) { +jlong JNICALL TransformNativeMethod([[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) { return 42; } extern "C" JNIEXPORT void JNICALL Java_art_Test2035_LinkClassMethods( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass target) { + JNIEnv* env, [[maybe_unused]] jclass klass, jclass target) { JNINativeMethod meth{"getValue", "()J", reinterpret_cast<void*>(TransformNativeMethod)}; env->RegisterNatives(target, &meth, 1); } diff --git a/test/2040-huge-native-alloc/huge_native_buf.cc b/test/2040-huge-native-alloc/huge_native_buf.cc index 20f629a2b8..71675b2c35 100644 --- a/test/2040-huge-native-alloc/huge_native_buf.cc +++ b/test/2040-huge-native-alloc/huge_native_buf.cc @@ -26,18 +26,18 @@ namespace HugeNativeBuf { static constexpr size_t HUGE_SIZE = 10'000'000; extern "C" JNIEXPORT jobject JNICALL Java_Main_getHugeNativeBuffer( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { char* buffer = new char[HUGE_SIZE]; return env->NewDirectByteBuffer(buffer, HUGE_SIZE); } extern "C" JNIEXPORT void JNICALL 
Java_Main_deleteHugeNativeBuffer( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject jbuffer) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject jbuffer) { delete [] static_cast<char*>(env->GetDirectBufferAddress(jbuffer)); } extern "C" JNIEXPORT jint JNICALL Java_Main_getGcNum( - JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) { return Runtime::Current()->GetHeap()->GetCurrentGcNum(); } diff --git a/test/2243-single-step-default/single_step_helper.cc b/test/2243-single-step-default/single_step_helper.cc index 432e982740..3b2d0bd214 100644 --- a/test/2243-single-step-default/single_step_helper.cc +++ b/test/2243-single-step-default/single_step_helper.cc @@ -29,7 +29,7 @@ static void singleStepCB(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, jmethodID method, - jlocation location ATTRIBUTE_UNUSED) { + [[maybe_unused]] jlocation location) { // We haven't reached the default method yet. Continue single stepping if (method != interface_default_method) { return; @@ -99,14 +99,14 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test2243_setSingleStepCallback(JNIEnv } extern "C" JNIEXPORT void JNICALL Java_art_Test2243_enableSingleStep(JNIEnv* env, - jclass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass cl, jthread thr) { jvmtiError err = jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_SINGLE_STEP, thr); JvmtiErrorToException(env, jvmti_env, err); } extern "C" JNIEXPORT void JNICALL Java_art_Test2243_setSingleStepUntil(JNIEnv* env, - jclass cl ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass cl, jobject method) { interface_default_method = env->FromReflectedMethod(method); } diff --git a/test/2262-miranda-methods/jni_invoke.cc b/test/2262-miranda-methods/jni_invoke.cc index da55f8b1d8..8bef7874c2 100644 --- a/test/2262-miranda-methods/jni_invoke.cc +++ b/test/2262-miranda-methods/jni_invoke.cc @@ -22,7 +22,7 @@ namespace art { extern "C" JNIEXPORT void JNICALL -Java_Main_CallNonvirtual(JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jobject o, jclass c, jmethodID m) { +Java_Main_CallNonvirtual(JNIEnv* env, [[maybe_unused]] jclass k, jobject o, jclass c, jmethodID m) { env->CallNonvirtualVoidMethod(o, c, m); } diff --git a/test/305-other-fault-handler/fault_handler.cc b/test/305-other-fault-handler/fault_handler.cc index db3f1f4bf7..240827b0d8 100644 --- a/test/305-other-fault-handler/fault_handler.cc +++ b/test/305-other-fault-handler/fault_handler.cc @@ -52,7 +52,7 @@ class TestFaultHandler final : public FaultHandler { manager_->RemoveHandler(this); } - bool Action(int sig, siginfo_t* siginfo, void* context ATTRIBUTE_UNUSED) override { + bool Action(int sig, siginfo_t* siginfo, [[maybe_unused]] void* context) override { CHECK_EQ(sig, SIGSEGV); CHECK_EQ(reinterpret_cast<uint32_t*>(siginfo->si_addr), GetTargetPointer()) << "Segfault on unexpected address!"; diff --git a/test/457-regs/regs_jni.cc b/test/457-regs/regs_jni.cc index 80abb3b406..76351ecf62 100644 --- a/test/457-regs/regs_jni.cc +++ b/test/457-regs/regs_jni.cc @@ -127,7 +127,7 @@ class TestVisitor : public StackVisitor { }; extern "C" JNIEXPORT void JNICALL Java_PhiLiveness_regsNativeCall( - JNIEnv*, jclass value ATTRIBUTE_UNUSED) { + JNIEnv*, [[maybe_unused]] jclass value) { ScopedObjectAccess soa(Thread::Current()); std::unique_ptr<Context> context(Context::Create()); TestVisitor visitor(soa.Self(), context.get()); @@ -136,7 +136,7 @@ extern "C" JNIEXPORT void JNICALL Java_PhiLiveness_regsNativeCall( } extern "C" JNIEXPORT void JNICALL 
Java_PhiLiveness_regsNativeCallWithParameters( - JNIEnv*, jclass value ATTRIBUTE_UNUSED, jobject main, jint int_value, jfloat float_value) { + JNIEnv*, [[maybe_unused]] jclass value, jobject main, jint int_value, jfloat float_value) { ScopedObjectAccess soa(Thread::Current()); std::unique_ptr<Context> context(Context::Create()); CHECK(soa.Decode<mirror::Object>(main) == nullptr); diff --git a/test/597-deopt-new-string/deopt.cc b/test/597-deopt-new-string/deopt.cc index 06dbca6d56..b8828157e0 100644 --- a/test/597-deopt-new-string/deopt.cc +++ b/test/597-deopt-new-string/deopt.cc @@ -28,7 +28,7 @@ namespace art { extern "C" JNIEXPORT void JNICALL Java_Main_deoptimizeAll( JNIEnv* env, - jclass cls ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass cls) { ScopedObjectAccess soa(env); ScopedThreadSuspension sts(Thread::Current(), ThreadState::kWaitingForDeoptimization); gc::ScopedGCCriticalSection gcs(Thread::Current(), @@ -41,7 +41,7 @@ extern "C" JNIEXPORT void JNICALL Java_Main_deoptimizeAll( extern "C" JNIEXPORT void JNICALL Java_Main_undeoptimizeAll( JNIEnv* env, - jclass cls ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass cls) { ScopedObjectAccess soa(env); ScopedThreadSuspension sts(Thread::Current(), ThreadState::kWaitingForDeoptimization); gc::ScopedGCCriticalSection gcs(Thread::Current(), diff --git a/test/720-thread-priority/thread_priority.cc b/test/720-thread-priority/thread_priority.cc index db4a2b29d9..519a0a10e0 100644 --- a/test/720-thread-priority/thread_priority.cc +++ b/test/720-thread-priority/thread_priority.cc @@ -22,7 +22,7 @@ #include "jni.h" extern "C" JNIEXPORT jint JNICALL Java_Main_getThreadPlatformPriority( - JNIEnv* env ATTRIBUTE_UNUSED, - jclass clazz ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass clazz) { return getpriority(PRIO_PROCESS, art::GetTid()); } diff --git a/test/900-hello-plugin/load_unload.cc b/test/900-hello-plugin/load_unload.cc index 7121d108a4..1d83d0903d 100644 --- a/test/900-hello-plugin/load_unload.cc +++ b/test/900-hello-plugin/load_unload.cc @@ -30,7 +30,7 @@ constexpr jint TEST_900_ENV_VERSION_NUMBER = 0x900FFFFF; constexpr uintptr_t ENV_VALUE = 900; // Allow this library to be used as a plugin too so we can test the stack. 
-static jint GetEnvHandler(JavaVMExt* vm ATTRIBUTE_UNUSED, void** new_env, jint version) { +static jint GetEnvHandler([[maybe_unused]] JavaVMExt* vm, void** new_env, jint version) { printf("%s called in test 900\n", __func__); if (version != TEST_900_ENV_VERSION_NUMBER) { return JNI_EVERSION; @@ -53,7 +53,7 @@ extern "C" bool ArtPlugin_Deinitialize() { extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { printf("Agent_OnLoad called with options \"%s\"\n", options); if (strcmp("test_900_round_2", options) == 0) { return 0; @@ -67,7 +67,7 @@ extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm, return 0; } -extern "C" JNIEXPORT void JNICALL Agent_OnUnload(JavaVM* vm ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Agent_OnUnload([[maybe_unused]] JavaVM* vm) { printf("Agent_OnUnload called\n"); } diff --git a/test/901-hello-ti-agent/basics.cc b/test/901-hello-ti-agent/basics.cc index 43a1d8319f..59212de1bd 100644 --- a/test/901-hello-ti-agent/basics.cc +++ b/test/901-hello-ti-agent/basics.cc @@ -44,14 +44,14 @@ static jvmtiPhase getPhase(jvmtiEnv* jenv) { return out; } -static void JNICALL VMStartCallback(jvmtiEnv *jenv, JNIEnv* jni_env ATTRIBUTE_UNUSED) { +static void JNICALL VMStartCallback(jvmtiEnv *jenv, [[maybe_unused]] JNIEnv* jni_env) { printf("VMStart (phase %d)\n", getPhase(jenv)); fsync(1); } static void JNICALL VMInitCallback(jvmtiEnv *jvmti_env, - JNIEnv* jni_env ATTRIBUTE_UNUSED, - jthread thread ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* jni_env, + [[maybe_unused]] jthread thread) { printf("VMInit (phase %d)\n", getPhase(jvmti_env)); fsync(1); } @@ -83,8 +83,8 @@ static void InstallVMEvents(jvmtiEnv* env) { } jint OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { printf("Loaded Agent for test 901-hello-ti-agent\n"); fsync(1); jvmtiEnv* env = nullptr; @@ -157,14 +157,14 @@ jint OnLoad(JavaVM* vm, } extern "C" JNIEXPORT void JNICALL Java_art_Test901_setVerboseFlag( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jint iflag, jboolean val) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jint iflag, jboolean val) { jvmtiVerboseFlag flag = static_cast<jvmtiVerboseFlag>(iflag); jvmtiError result = jvmti_env->SetVerboseFlag(flag, val); JvmtiErrorToException(env, jvmti_env, result); } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test901_checkLivePhase( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jvmtiPhase current_phase; jvmtiError phase_result = jvmti_env->GetPhase(&current_phase); if (JvmtiErrorToException(env, jvmti_env, phase_result)) { @@ -180,7 +180,7 @@ static void CallJvmtiFunction(jvmtiEnv* env, jclass klass, jvmtiError* err) { } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test901_checkUnattached( - JNIEnv* env ATTRIBUTE_UNUSED, jclass Main_klass) { + [[maybe_unused]] JNIEnv* env, jclass Main_klass) { jvmtiError res = JVMTI_ERROR_NONE; std::thread t1(CallJvmtiFunction, jvmti_env, Main_klass, &res); t1.join(); @@ -188,7 +188,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_art_Test901_checkUnattached( } extern "C" JNIEXPORT jstring JNICALL Java_art_Test901_getErrorName( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jint error) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jint error) { char* name; jvmtiError res = jvmti_env->GetErrorName(static_cast<jvmtiError>(error), &name); if
(JvmtiErrorToException(env, jvmti_env, res)) { diff --git a/test/903-hello-tagging/tagging.cc b/test/903-hello-tagging/tagging.cc index e0a0136167..a5eb49caa9 100644 --- a/test/903-hello-tagging/tagging.cc +++ b/test/903-hello-tagging/tagging.cc @@ -134,7 +134,7 @@ static jlong GetTag(jvmtiEnv* env, jobject obj) { } extern "C" JNIEXPORT jlongArray JNICALL Java_art_Test903_testTagsInDifferentEnvs( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jlong base_tag, jint count) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jlong base_tag, jint count) { std::unique_ptr<jvmtiEnv*[]> envs = std::unique_ptr<jvmtiEnv*[]>(new jvmtiEnv*[count]); envs[0] = jvmti_env; for (int32_t i = 1; i != count; ++i) { diff --git a/test/904-object-allocation/tracking.cc b/test/904-object-allocation/tracking.cc index abb6083de4..4b14932811 100644 --- a/test/904-object-allocation/tracking.cc +++ b/test/904-object-allocation/tracking.cc @@ -84,7 +84,7 @@ struct EventLog { static std::mutex gEventsMutex; static std::vector<EventLog> gEvents; -static void JNICALL ObjectAllocated(jvmtiEnv* ti_env ATTRIBUTE_UNUSED, +static void JNICALL ObjectAllocated([[maybe_unused]] jvmtiEnv* ti_env, JNIEnv* jni_env, jthread thread, jobject object, @@ -99,7 +99,7 @@ static void JNICALL ObjectAllocated(jvmtiEnv* ti_env ATTRIBUTE_UNUSED, } extern "C" JNIEXPORT void JNICALL Java_art_Test904_setupObjectAllocCallback( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jboolean enable) { + JNIEnv* env, [[maybe_unused]] jclass klass, jboolean enable) { env->GetJavaVM(&vm); jvmtiEventCallbacks callbacks; memset(&callbacks, 0, sizeof(jvmtiEventCallbacks)); @@ -119,7 +119,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test904_enableAllocationTracking( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test904_getTrackingEventMessages( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jobjectArray threads) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jobjectArray threads) { std::lock_guard<std::mutex> guard(gEventsMutex); std::vector<std::string> real_events; std::vector<jthread> thread_lst; diff --git a/test/905-object-free/tracking_free.cc b/test/905-object-free/tracking_free.cc index d85d9d34ae..ae93322ec5 100644 --- a/test/905-object-free/tracking_free.cc +++ b/test/905-object-free/tracking_free.cc @@ -64,7 +64,7 @@ static void setupObjectFreeCallback(JNIEnv* env, jvmtiEnv* jenv, jvmtiEventObjec } extern "C" JNIEXPORT void JNICALL Java_art_Test905_setupObjectFreeCallback( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { setupObjectFreeCallback(env, jvmti_env, ObjectFree1); JavaVM* jvm = nullptr; env->GetJavaVM(&jvm); @@ -74,7 +74,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test905_setupObjectFreeCallback( } extern "C" JNIEXPORT void JNICALL Java_art_Test905_enableFreeTracking( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jboolean enable) { + JNIEnv* env, [[maybe_unused]] jclass klass, jboolean enable) { jvmtiError ret = jvmti_env->SetEventNotificationMode( enable ? JVMTI_ENABLE : JVMTI_DISABLE, JVMTI_EVENT_OBJECT_FREE, @@ -90,7 +90,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test905_enableFreeTracking( } extern "C" JNIEXPORT jlongArray JNICALL Java_art_Test905_getCollectedTags( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint index) { + JNIEnv* env, [[maybe_unused]] jclass klass, jint index) { std::lock_guard<std::mutex> mu((index == 0) ? ct1_mutex : ct2_mutex); std::vector<jlong>& tags = (index == 0) ? 
collected_tags1 : collected_tags2; jlongArray ret = env->NewLongArray(tags.size()); @@ -105,7 +105,7 @@ extern "C" JNIEXPORT jlongArray JNICALL Java_art_Test905_getCollectedTags( } extern "C" JNIEXPORT jlong JNICALL Java_art_Test905_getTag2( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj) { jlong tag; jvmtiError ret = jvmti_env2->GetTag(obj, &tag); JvmtiErrorToException(env, jvmti_env, ret); @@ -113,7 +113,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test905_getTag2( } extern "C" JNIEXPORT void JNICALL Java_art_Test905_setTag2( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject obj, jlong tag) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject obj, jlong tag) { jvmtiError ret = jvmti_env2->SetTag(obj, tag); JvmtiErrorToException(env, jvmti_env, ret); } diff --git a/test/906-iterate-heap/iterate_heap.cc b/test/906-iterate-heap/iterate_heap.cc index f0a6624ef2..b226eb03b6 100644 --- a/test/906-iterate-heap/iterate_heap.cc +++ b/test/906-iterate-heap/iterate_heap.cc @@ -73,7 +73,7 @@ static bool Run(JNIEnv* env, jint heap_filter, jclass klass_filter, IterationCon extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapCount( JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jint heap_filter, jclass klass_filter, jint stop_after) { @@ -84,10 +84,10 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapCount( stop_after(_stop_after) { } - jint Handle(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED) override { + jint Handle([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jint length) override { counter++; if (counter == stop_after) { return JVMTI_VISIT_ABORT; @@ -111,7 +111,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapCount( extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapData( JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jint heap_filter, jclass klass_filter, jlongArray class_tags, @@ -156,15 +156,15 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test906_iterateThroughHeapData( } extern "C" JNIEXPORT void JNICALL Java_art_Test906_iterateThroughHeapAdd( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint heap_filter, jclass klass_filter) { + JNIEnv* env, [[maybe_unused]] jclass klass, jint heap_filter, jclass klass_filter) { class AddIterationConfig : public IterationConfig { public: AddIterationConfig() {} - jint Handle(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, + jint Handle([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, jlong* tag_ptr, - jint length ATTRIBUTE_UNUSED) override { + [[maybe_unused]] jint length) override { jlong current_tag = *tag_ptr; if (current_tag != 0) { *tag_ptr = current_tag + 10; @@ -178,15 +178,15 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test906_iterateThroughHeapAdd( } extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapString( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) { + JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) { struct FindStringCallbacks { explicit FindStringCallbacks(jlong t) : tag_to_find(t) {} - static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, - void* user_data 
ATTRIBUTE_UNUSED) { + static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data) { return 0; } @@ -234,15 +234,15 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapString( } extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapPrimitiveArray( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) { + JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) { struct FindArrayCallbacks { explicit FindArrayCallbacks(jlong t) : tag_to_find(t) {} - static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, - void* user_data ATTRIBUTE_UNUSED) { + static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data) { return 0; } @@ -345,15 +345,15 @@ static constexpr const char* GetPrimitiveTypeName(jvmtiPrimitiveType type) { } extern "C" JNIEXPORT jstring JNICALL Java_art_Test906_iterateThroughHeapPrimitiveFields( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) { + JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag) { struct FindFieldCallbacks { explicit FindFieldCallbacks(jlong t) : tag_to_find(t) {} - static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, - void* user_data ATTRIBUTE_UNUSED) { + static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data) { return 0; } diff --git a/test/907-get-loaded-classes/get_loaded_classes.cc b/test/907-get-loaded-classes/get_loaded_classes.cc index 87c98e186e..58addd1bd3 100644 --- a/test/907-get-loaded-classes/get_loaded_classes.cc +++ b/test/907-get-loaded-classes/get_loaded_classes.cc @@ -41,7 +41,7 @@ static jstring GetClassName(JNIEnv* jni_env, jclass cls) { } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test907_getLoadedClasses( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { jint count = -1; jclass* classes = nullptr; jvmtiError result = jvmti_env->GetLoadedClasses(&count, &classes); diff --git a/test/908-gc-start-finish/gc_callbacks.cc b/test/908-gc-start-finish/gc_callbacks.cc index ddd2ba7734..e839a22734 100644 --- a/test/908-gc-start-finish/gc_callbacks.cc +++ b/test/908-gc-start-finish/gc_callbacks.cc @@ -32,16 +32,16 @@ namespace Test908GcStartFinish { static size_t starts = 0; static size_t finishes = 0; -static void JNICALL GarbageCollectionFinish(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) { +static void JNICALL GarbageCollectionFinish([[maybe_unused]] jvmtiEnv* ti_env) { finishes++; } -static void JNICALL GarbageCollectionStart(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) { +static void JNICALL GarbageCollectionStart([[maybe_unused]] jvmtiEnv* ti_env) { starts++; } extern "C" JNIEXPORT void JNICALL Java_art_Test908_setupGcCallback( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { jvmtiEventCallbacks callbacks; memset(&callbacks, 0, sizeof(jvmtiEventCallbacks)); callbacks.GarbageCollectionFinish = GarbageCollectionFinish; @@ -52,7 +52,7 @@ 
extern "C" JNIEXPORT void JNICALL Java_art_Test908_setupGcCallback( } extern "C" JNIEXPORT void JNICALL Java_art_Test908_enableGcTracking(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jboolean enable) { jvmtiError ret = jvmti_env->SetEventNotificationMode( enable ? JVMTI_ENABLE : JVMTI_DISABLE, @@ -70,15 +70,15 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test908_enableGcTracking(JNIEnv* env, } } -extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcStarts(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcStarts([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { jint result = static_cast<jint>(starts); starts = 0; return result; } -extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcFinishes(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jint JNICALL Java_art_Test908_getGcFinishes([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { jint result = static_cast<jint>(finishes); finishes = 0; return result; diff --git a/test/909-attach-agent/attach.cc b/test/909-attach-agent/attach.cc index 50ab26a374..56d6c0ff1f 100644 --- a/test/909-attach-agent/attach.cc +++ b/test/909-attach-agent/attach.cc @@ -35,8 +35,8 @@ static void Println(const char* c) { static constexpr jint kArtTiVersion = JVMTI_VERSION_1_2 | 0x40000000; jint OnAttach(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { Println("Attached Agent for test 909-attach-agent"); jvmtiEnv* env = nullptr; jvmtiEnv* env2 = nullptr; diff --git a/test/910-methods/methods.cc b/test/910-methods/methods.cc index 9c726e1d5c..473e875085 100644 --- a/test/910-methods/methods.cc +++ b/test/910-methods/methods.cc @@ -31,7 +31,7 @@ namespace art { namespace Test910Methods { extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test910_getMethodName( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); char* name; @@ -74,7 +74,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test910_getMethodName( } extern "C" JNIEXPORT jclass JNICALL Java_art_Test910_getMethodDeclaringClass( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jclass declaring_class; @@ -87,7 +87,7 @@ extern "C" JNIEXPORT jclass JNICALL Java_art_Test910_getMethodDeclaringClass( } extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getMethodModifiers( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jint modifiers; @@ -100,7 +100,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getMethodModifiers( } extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getMaxLocals( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jint max_locals; @@ -113,7 +113,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getMaxLocals( } extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getArgumentsSize( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { 
jmethodID id = env->FromReflectedMethod(method); jint arguments; @@ -126,7 +126,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test910_getArgumentsSize( } extern "C" JNIEXPORT jlong JNICALL Java_art_Test910_getMethodLocationStart( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jlong start; @@ -140,7 +140,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test910_getMethodLocationStart( } extern "C" JNIEXPORT jlong JNICALL Java_art_Test910_getMethodLocationEnd( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jlong start; @@ -154,7 +154,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test910_getMethodLocationEnd( } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodNative( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jboolean is_native; @@ -167,7 +167,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodNative( } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodObsolete( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jboolean is_obsolete; @@ -180,7 +180,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodObsolete( } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test910_isMethodSynthetic( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method) { jmethodID id = env->FromReflectedMethod(method); jboolean is_synthetic; diff --git a/test/911-get-stack-trace/stack_trace.cc b/test/911-get-stack-trace/stack_trace.cc index 2b620b1fc0..3b8210a9be 100644 --- a/test/911-get-stack-trace/stack_trace.cc +++ b/test/911-get-stack-trace/stack_trace.cc @@ -126,7 +126,7 @@ static jobjectArray TranslateJvmtiFrameInfoArray(JNIEnv* env, } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_PrintThread_getStackTrace( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thread, jint start, jint max) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thread, jint start, jint max) { std::unique_ptr<jvmtiFrameInfo[]> frames(new jvmtiFrameInfo[max]); jint count; @@ -141,7 +141,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_PrintThread_getStackTrace( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_AllTraces_getAllStackTraces( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint max) { + JNIEnv* env, [[maybe_unused]] jclass klass, jint max) { jint thread_count; jvmtiStackInfo* stack_infos; { @@ -169,7 +169,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_AllTraces_getAllStackTraces( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_ThreadListTraces_getThreadListStackTraces( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobjectArray jthreads, jint max) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobjectArray jthreads, jint max) { jint thread_count = env->GetArrayLength(jthreads); std::unique_ptr<jthread[]> threads(new jthread[thread_count]); for (jint i = 0; i != thread_count; ++i) { @@ -205,7 +205,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_ThreadListTraces_getThreadLis } extern "C" JNIEXPORT jint JNICALL Java_art_Frames_getFrameCount( - 
JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thread) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thread) { jint count; jvmtiError result = jvmti_env->GetFrameCount(thread, &count); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -215,7 +215,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Frames_getFrameCount( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Frames_getFrameLocation( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thread, jint depth) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thread, jint depth) { jmethodID method; jlocation location; diff --git a/test/912-classes/classes.cc b/test/912-classes/classes.cc index ff50223861..8fa41f9ba7 100644 --- a/test/912-classes/classes.cc +++ b/test/912-classes/classes.cc @@ -37,7 +37,7 @@ namespace art { namespace Test912Classes { extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isModifiableClass( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { jboolean res = JNI_FALSE; jvmtiError result = jvmti_env->IsModifiableClass(klass, &res); JvmtiErrorToException(env, jvmti_env, result); @@ -45,7 +45,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isModifiableClass( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassSignature( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { char* sig; char* gen; jvmtiError result = jvmti_env->GetClassSignature(klass, &sig, &gen); @@ -74,7 +74,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassSignature( } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isInterface( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { jboolean is_interface = JNI_FALSE; jvmtiError result = jvmti_env->IsInterface(klass, &is_interface); JvmtiErrorToException(env, jvmti_env, result); @@ -82,7 +82,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isInterface( } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isArrayClass( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { jboolean is_array_class = JNI_FALSE; jvmtiError result = jvmti_env->IsArrayClass(klass, &is_array_class); JvmtiErrorToException(env, jvmti_env, result); @@ -90,7 +90,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912_isArrayClass( } extern "C" JNIEXPORT jint JNICALL Java_art_Test912_getClassModifiers( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { jint mod; jvmtiError result = jvmti_env->GetClassModifiers(klass, &mod); JvmtiErrorToException(env, jvmti_env, result); @@ -98,7 +98,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test912_getClassModifiers( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassFields( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { jint count = 0; jfieldID* fields = nullptr; jvmtiError result = jvmti_env->GetClassFields(klass, &count, &fields); @@ -123,7 +123,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassFields( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassMethods( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* 
env, [[maybe_unused]] jclass Main_klass, jclass klass) { jint count = 0; jmethodID* methods = nullptr; jvmtiError result = jvmti_env->GetClassMethods(klass, &count, &methods); @@ -148,7 +148,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassMethods( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getImplementedInterfaces( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { jint count = 0; jclass* classes = nullptr; jvmtiError result = jvmti_env->GetImplementedInterfaces(klass, &count, &classes); @@ -167,7 +167,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getImplementedInterfa } extern "C" JNIEXPORT jint JNICALL Java_art_Test912_getClassStatus( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass , jclass klass) { jint status; jvmtiError result = jvmti_env->GetClassStatus(klass, &status); JvmtiErrorToException(env, jvmti_env, result); @@ -175,7 +175,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test912_getClassStatus( } extern "C" JNIEXPORT jobject JNICALL Java_art_Test912_getClassLoader( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass , jclass klass) { jobject classloader; jvmtiError result = jvmti_env->GetClassLoader(klass, &classloader); JvmtiErrorToException(env, jvmti_env, result); @@ -183,7 +183,7 @@ extern "C" JNIEXPORT jobject JNICALL Java_art_Test912_getClassLoader( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassLoaderClasses( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jobject jclassloader) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass , jobject jclassloader) { jint count = 0; jclass* classes = nullptr; jvmtiError result = jvmti_env->GetClassLoaderClasses(jclassloader, &count, &classes); @@ -202,7 +202,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassLoaderClasses } extern "C" JNIEXPORT jintArray JNICALL Java_art_Test912_getClassVersion( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass , jclass klass) { jint major, minor; jvmtiError result = jvmti_env->GetClassVersionNumbers(klass, &minor, &major); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -280,7 +280,7 @@ static std::mutex gEventsMutex; static std::vector<std::string> gEvents; extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test912_getClassLoadMessages( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { std::lock_guard<std::mutex> guard(gEventsMutex); jobjectArray ret = CreateObjectArray(env, static_cast<jint>(gEvents.size()), @@ -365,7 +365,7 @@ class ClassLoadPreparePrinter { std::string ClassLoadPreparePrinter::thread_name_filter_; // NOLINT [runtime/string] [4] extern "C" JNIEXPORT void JNICALL Java_art_Test912_enableClassLoadPreparePrintEvents( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean enable, jthread thread) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass , jboolean enable, jthread thread) { if (thread != nullptr) { ClassLoadPreparePrinter::thread_name_filter_ = ClassLoadPreparePrinter::GetThreadName(jvmti_env, env, thread); @@ -432,7 +432,7 @@ class ClassLoadPrepareEquality { static constexpr const char* kWeakInitSig = "(Ljava/lang/Object;)V"; static constexpr const char* kWeakGetSig = "()Ljava/lang/Object;"; - static void 
AgentThreadTest(jvmtiEnv* jvmti ATTRIBUTE_UNUSED, + static void AgentThreadTest([[maybe_unused]] jvmtiEnv* jvmti, JNIEnv* env, jobject* obj_global) { jobject target = *obj_global; @@ -449,7 +449,7 @@ class ClassLoadPrepareEquality { static void JNICALL ClassLoadCallback(jvmtiEnv* jenv, JNIEnv* jni_env, - jthread thread ATTRIBUTE_UNUSED, + [[maybe_unused]] jthread thread, jclass klass) { std::string name = GetClassName(jenv, jni_env, klass); if (name == kClassName) { @@ -470,7 +470,7 @@ class ClassLoadPrepareEquality { static void JNICALL ClassPrepareCallback(jvmtiEnv* jenv, JNIEnv* jni_env, - jthread thread ATTRIBUTE_UNUSED, + [[maybe_unused]] jthread thread, jclass klass) { std::string name = GetClassName(jenv, jni_env, klass); if (name == kClassName) { @@ -577,13 +577,13 @@ bool ClassLoadPrepareEquality::found_ = false; bool ClassLoadPrepareEquality::compared_ = false; extern "C" JNIEXPORT void JNICALL Java_art_Test912_setEqualityEventStorageClass( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jclass klass) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jclass klass) { ClassLoadPrepareEquality::storage_class_ = reinterpret_cast<jclass>(env->NewGlobalRef(klass)); } extern "C" JNIEXPORT void JNICALL Java_art_Test912_enableClassLoadPrepareEqualityEvents( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean b) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean b) { EnableEvents(env, b, ClassLoadPrepareEquality::ClassLoadCallback, @@ -599,17 +599,17 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test912_enableClassLoadPrepareEqualit // Global to pass information to the ClassPrepare event. static jobject gRunnableGlobal = nullptr; extern "C" JNIEXPORT void JNICALL Java_art_Test912_runRecursiveClassPrepareEvents( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject runnable) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject runnable) { CHECK(gRunnableGlobal == nullptr); gRunnableGlobal = env->NewGlobalRef(runnable); EnableEvents( env, true, nullptr, - [](jvmtiEnv* jenv ATTRIBUTE_UNUSED, + []([[maybe_unused]] jvmtiEnv* jenv, JNIEnv* jni_env, - jthread thread ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) -> void { + [[maybe_unused]] jthread thread, + [[maybe_unused]] jclass klass) -> void { jclass runnable_class = jni_env->FindClass("java/lang/Runnable"); jni_env->CallVoidMethod( gRunnableGlobal, jni_env->GetMethodID(runnable_class, "run", "()V")); diff --git a/test/912-classes/classes_art.cc b/test/912-classes/classes_art.cc index de2e456a53..a3b4d94fa4 100644 --- a/test/912-classes/classes_art.cc +++ b/test/912-classes/classes_art.cc @@ -75,10 +75,10 @@ static void EnableEvents(JNIEnv* env, } struct ClassLoadSeen { - static void JNICALL ClassLoadSeenCallback(jvmtiEnv* jenv ATTRIBUTE_UNUSED, - JNIEnv* jni_env ATTRIBUTE_UNUSED, - jthread thread ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { + static void JNICALL ClassLoadSeenCallback([[maybe_unused]] jvmtiEnv* jenv, + [[maybe_unused]] JNIEnv* jni_env, + [[maybe_unused]] jthread thread, + [[maybe_unused]] jclass klass) { saw_event = true; } @@ -87,17 +87,17 @@ struct ClassLoadSeen { bool ClassLoadSeen::saw_event = false; extern "C" JNIEXPORT void JNICALL Java_art_Test912Art_enableClassLoadSeenEvents( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean b) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean b) { EnableEvents(env, b, ClassLoadSeen::ClassLoadSeenCallback, nullptr); } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912Art_hadLoadEvent( - JNIEnv* env ATTRIBUTE_UNUSED, 
jclass Main_klass ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass Main_klass) { return ClassLoadSeen::saw_event ? JNI_TRUE : JNI_FALSE; } extern "C" JNIEXPORT jboolean JNICALL Java_art_Test912Art_isLoadedClass( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring class_name) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring class_name) { ScopedUtfChars name(env, class_name); jint class_count; diff --git a/test/913-heaps/heaps.cc b/test/913-heaps/heaps.cc index 671cff8a31..e4c3223aaa 100644 --- a/test/913-heaps/heaps.cc +++ b/test/913-heaps/heaps.cc @@ -48,7 +48,7 @@ static constexpr jlong kThreadTag = 3000; static constexpr const char* kThreadReferree = "3000@0"; extern "C" JNIEXPORT void JNICALL Java_art_Test913_forceGarbageCollection( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { jvmtiError ret = jvmti_env->ForceGarbageCollection(); JvmtiErrorToException(env, jvmti_env, ret); } @@ -68,7 +68,7 @@ static constexpr jlong kClassDataSerialBase = 123456780000; // Register a class (or general object) in the class-data map. The serial number is determined by // the order of calls to this function (so stable Java code leads to stable numbering). extern "C" JNIEXPORT void JNICALL Java_art_Test913_registerClass( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag, jobject obj) { + JNIEnv* env, [[maybe_unused]] jclass klass, jlong tag, jobject obj) { ClassData data; if (JvmtiErrorToException(env, jvmti_env, jvmti_env->GetObjectSize(obj, &data.size))) { return; @@ -139,7 +139,7 @@ static bool Run(JNIEnv* env, extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferences( JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jint heap_filter, jclass klass_filter, jobject initial_object, @@ -162,7 +162,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferences( jlong* tag_ptr, jlong* referrer_tag_ptr, jint length, - void* user_data ATTRIBUTE_UNUSED) override { + [[maybe_unused]] void* user_data) override { jlong tag = *tag_ptr; // Ignore any jni-global roots with untagged classes. These can be from the environment, @@ -570,18 +570,18 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferences( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferencesString( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject initial_object) { + JNIEnv* env, [[maybe_unused]] jclass klass , jobject initial_object) { struct FindStringCallbacks { static jint JNICALL FollowReferencesCallback( - jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED, - const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED, - jlong class_tag ATTRIBUTE_UNUSED, - jlong referrer_class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jlong* referrer_tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, - void* user_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiHeapReferenceKind reference_kind, + [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info, + [[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong referrer_class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jlong* referrer_tag_ptr, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data) { return JVMTI_VISIT_OBJECTS; // Continue visiting. 
} @@ -633,18 +633,18 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test913_followReferencesStrin extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_followReferencesPrimitiveArray( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject initial_object) { + JNIEnv* env, [[maybe_unused]] jclass klass , jobject initial_object) { struct FindArrayCallbacks { static jint JNICALL FollowReferencesCallback( - jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED, - const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED, - jlong class_tag ATTRIBUTE_UNUSED, - jlong referrer_class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jlong* referrer_tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, - void* user_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiHeapReferenceKind reference_kind, + [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info, + [[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong referrer_class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jlong* referrer_tag_ptr, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data) { return JVMTI_VISIT_OBJECTS; // Continue visiting. } @@ -749,18 +749,18 @@ static constexpr const char* GetPrimitiveTypeName(jvmtiPrimitiveType type) { } extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_followReferencesPrimitiveFields( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject initial_object) { + JNIEnv* env, [[maybe_unused]] jclass klass , jobject initial_object) { struct FindFieldCallbacks { static jint JNICALL FollowReferencesCallback( - jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED, - const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED, - jlong class_tag ATTRIBUTE_UNUSED, - jlong referrer_class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, - jlong* tag_ptr ATTRIBUTE_UNUSED, - jlong* referrer_tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, - void* user_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jvmtiHeapReferenceKind reference_kind, + [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info, + [[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong referrer_class_tag, + [[maybe_unused]] jlong size, + [[maybe_unused]] jlong* tag_ptr, + [[maybe_unused]] jlong* referrer_tag_ptr, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data) { return JVMTI_VISIT_OBJECTS; // Continue visiting. 
} @@ -823,16 +823,16 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_followReferencesPrimitiveF static size_t starts = 0; static size_t finishes = 0; -static void JNICALL GarbageCollectionFinish(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) { +static void JNICALL GarbageCollectionFinish([[maybe_unused]] jvmtiEnv* ti_env) { finishes++; } -static void JNICALL GarbageCollectionStart(jvmtiEnv* ti_env ATTRIBUTE_UNUSED) { +static void JNICALL GarbageCollectionStart([[maybe_unused]] jvmtiEnv* ti_env) { starts++; } extern "C" JNIEXPORT void JNICALL Java_art_Test913_setupGcCallback( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { jvmtiEventCallbacks callbacks; memset(&callbacks, 0, sizeof(jvmtiEventCallbacks)); callbacks.GarbageCollectionFinish = GarbageCollectionFinish; @@ -843,7 +843,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test913_setupGcCallback( } extern "C" JNIEXPORT void JNICALL Java_art_Test913_enableGcTracking(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jboolean enable) { jvmtiError ret = jvmti_env->SetEventNotificationMode( enable ? JVMTI_ENABLE : JVMTI_DISABLE, @@ -861,15 +861,15 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test913_enableGcTracking(JNIEnv* env, } } -extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcStarts(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcStarts([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { jint result = static_cast<jint>(starts); starts = 0; return result; } -extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcFinishes(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getGcFinishes([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { jint result = static_cast<jint>(finishes); finishes = 0; return result; @@ -902,7 +902,7 @@ static void FreeExtensionFunctionInfo(jvmtiExtensionFunctionInfo* extensions, ji } extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkForExtensionApis( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { jint extension_count; jvmtiExtensionFunctionInfo* extensions; jvmtiError result = jvmti_env->GetExtensionFunctions(&extension_count, &extensions); @@ -993,7 +993,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkForExtensionApis( } extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getObjectHeapId( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag) { + JNIEnv* env, [[maybe_unused]] jclass klass , jlong tag) { CHECK(gGetObjectHeapIdFn != nullptr); jint heap_id; jvmtiError result = gGetObjectHeapIdFn(jvmti_env, tag, &heap_id); @@ -1002,7 +1002,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test913_getObjectHeapId( } extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_getHeapName( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint heap_id) { + JNIEnv* env, [[maybe_unused]] jclass klass , jint heap_id) { CHECK(gGetHeapNameFn != nullptr); char* heap_name; jvmtiError result = gGetHeapNameFn(jvmti_env, heap_id, &heap_name); @@ -1015,20 +1015,20 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test913_getHeapName( } extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkGetObjectHeapIdInCallback( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jlong tag, jint heap_id) { + JNIEnv* env, [[maybe_unused]] jclass klass , jlong tag, jint heap_id) { CHECK(gGetObjectHeapIdFn != nullptr); { struct 
GetObjectHeapIdCallbacks { static jint JNICALL FollowReferencesCallback( - jvmtiHeapReferenceKind reference_kind ATTRIBUTE_UNUSED, - const jvmtiHeapReferenceInfo* reference_info ATTRIBUTE_UNUSED, - jlong class_tag ATTRIBUTE_UNUSED, - jlong referrer_class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, + [[maybe_unused]] jvmtiHeapReferenceKind reference_kind, + [[maybe_unused]] const jvmtiHeapReferenceInfo* reference_info, + [[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong referrer_class_tag, + [[maybe_unused]] jlong size, jlong* tag_ptr, - jlong* referrer_tag_ptr ATTRIBUTE_UNUSED, - jint length ATTRIBUTE_UNUSED, + [[maybe_unused]] jlong* referrer_tag_ptr, + [[maybe_unused]] jint length, void* user_data) { if (*tag_ptr != 0) { GetObjectHeapIdCallbacks* p = reinterpret_cast<GetObjectHeapIdCallbacks*>(user_data); @@ -1064,10 +1064,10 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkGetObjectHeapIdInCallbac { struct GetObjectHeapIdCallbacks { - static jint JNICALL HeapIterationCallback(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, + static jint JNICALL HeapIterationCallback([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, jlong* tag_ptr, - jint length ATTRIBUTE_UNUSED, + [[maybe_unused]] jint length, void* user_data) { if (*tag_ptr != 0) { GetObjectHeapIdCallbacks* p = reinterpret_cast<GetObjectHeapIdCallbacks*>(user_data); @@ -1104,11 +1104,11 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test913_checkGetObjectHeapIdInCallbac static bool gFoundExt = false; -static jint JNICALL HeapIterationExtCallback(jlong class_tag ATTRIBUTE_UNUSED, - jlong size ATTRIBUTE_UNUSED, +static jint JNICALL HeapIterationExtCallback([[maybe_unused]] jlong class_tag, + [[maybe_unused]] jlong size, jlong* tag_ptr, - jint length ATTRIBUTE_UNUSED, - void* user_data ATTRIBUTE_UNUSED, + [[maybe_unused]] jint length, + [[maybe_unused]] void* user_data, jint heap_id) { // We expect some tagged objects at or above the threshold, where the expected heap id is // encoded into lowest byte. 
@@ -1123,7 +1123,7 @@ static jint JNICALL HeapIterationExtCallback(jlong class_tag ATTRIBUTE_UNUSED, } extern "C" JNIEXPORT void JNICALL Java_art_Test913_iterateThroughHeapExt( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { CHECK(gIterateThroughHeapExt != nullptr); jvmtiHeapCallbacks callbacks; diff --git a/test/920-objects/objects.cc b/test/920-objects/objects.cc index 101ebb9acd..8fddc4a1f3 100644 --- a/test/920-objects/objects.cc +++ b/test/920-objects/objects.cc @@ -28,7 +28,7 @@ namespace art { namespace Test920Objects { extern "C" JNIEXPORT jlong JNICALL Java_art_Test920_getObjectSize( - JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED, jobject object) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass, jobject object) { jlong size; jvmtiError result = jvmti_env->GetObjectSize(object, &size); @@ -44,7 +44,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test920_getObjectSize( } extern "C" JNIEXPORT jint JNICALL Java_art_Test920_getObjectHashCode( - JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED, jobject object) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass, jobject object) { jint hash; jvmtiError result = jvmti_env->GetObjectHashCode(object, &hash); diff --git a/test/922-properties/properties.cc b/test/922-properties/properties.cc index 6af45f54d6..eed0a00dee 100644 --- a/test/922-properties/properties.cc +++ b/test/922-properties/properties.cc @@ -30,7 +30,7 @@ namespace art { namespace Test922Properties { extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test922_getSystemProperties( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jint count; char** properties; jvmtiError result = jvmti_env->GetSystemProperties(&count, &properties); @@ -55,7 +55,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test922_getSystemProperties( } extern "C" JNIEXPORT jstring JNICALL Java_art_Test922_getSystemProperty( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring key) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring key) { ScopedUtfChars string(env, key); if (string.c_str() == nullptr) { return nullptr; @@ -75,7 +75,7 @@ extern "C" JNIEXPORT jstring JNICALL Java_art_Test922_getSystemProperty( } extern "C" JNIEXPORT void JNICALL Java_art_Test922_setSystemProperty( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring key, jstring value) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring key, jstring value) { ScopedUtfChars key_string(env, key); if (key_string.c_str() == nullptr) { return; diff --git a/test/923-monitors/monitors.cc b/test/923-monitors/monitors.cc index e4f3860d73..bfd00f83f0 100644 --- a/test/923-monitors/monitors.cc +++ b/test/923-monitors/monitors.cc @@ -38,7 +38,7 @@ static jrawMonitorID LongToMonitor(jlong l) { } extern "C" JNIEXPORT jlong JNICALL Java_art_Test923_createRawMonitor( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jrawMonitorID id; jvmtiError result = jvmti_env->CreateRawMonitor("placeholder", &id); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -48,37 +48,37 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test923_createRawMonitor( } extern "C" JNIEXPORT void JNICALL Java_art_Test923_destroyRawMonitor( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) { jvmtiError result = jvmti_env->DestroyRawMonitor(LongToMonitor(l)); 
JvmtiErrorToException(env, jvmti_env, result); } extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorEnter( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) { jvmtiError result = jvmti_env->RawMonitorEnter(LongToMonitor(l)); JvmtiErrorToException(env, jvmti_env, result); } extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorExit( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) { jvmtiError result = jvmti_env->RawMonitorExit(LongToMonitor(l)); JvmtiErrorToException(env, jvmti_env, result); } extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorWait( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l, jlong millis) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l, jlong millis) { jvmtiError result = jvmti_env->RawMonitorWait(LongToMonitor(l), millis); JvmtiErrorToException(env, jvmti_env, result); } extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorNotify( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) { jvmtiError result = jvmti_env->RawMonitorNotify(LongToMonitor(l)); JvmtiErrorToException(env, jvmti_env, result); } extern "C" JNIEXPORT void JNICALL Java_art_Test923_rawMonitorNotifyAll( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jlong l) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jlong l) { jvmtiError result = jvmti_env->RawMonitorNotifyAll(LongToMonitor(l)); JvmtiErrorToException(env, jvmti_env, result); } diff --git a/test/924-threads/threads.cc b/test/924-threads/threads.cc index 8caff768c1..49f805ce4b 100644 --- a/test/924-threads/threads.cc +++ b/test/924-threads/threads.cc @@ -41,7 +41,7 @@ struct WaiterStruct { }; extern "C" JNIEXPORT jlong JNICALL Java_art_Test924_nativeWaiterStructAlloc( - JNIEnv* env, jclass TestClass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass TestClass) { WaiterStruct* s = nullptr; if (JvmtiErrorToException(env, jvmti_env, @@ -55,19 +55,19 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test924_nativeWaiterStructAlloc( } extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeWaiterStructWaitForNative( - JNIEnv* env ATTRIBUTE_UNUSED, jclass TestClass ATTRIBUTE_UNUSED, jlong waiter_struct) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass TestClass, jlong waiter_struct) { WaiterStruct* s = reinterpret_cast<WaiterStruct*>(static_cast<intptr_t>(waiter_struct)); while (!s->started) { } } extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeWaiterStructFinish( - JNIEnv* env ATTRIBUTE_UNUSED, jclass TestClass ATTRIBUTE_UNUSED, jlong waiter_struct) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass TestClass, jlong waiter_struct) { WaiterStruct* s = reinterpret_cast<WaiterStruct*>(static_cast<intptr_t>(waiter_struct)); s->finish = true; } extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeLoop(JNIEnv* env, - jclass TestClass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass TestClass, jlong waiter_struct) { WaiterStruct* s = reinterpret_cast<WaiterStruct*>(static_cast<intptr_t>(waiter_struct)); s->started = true; @@ -79,7 +79,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test924_nativeLoop(JNIEnv* env, // private static native Object[] getThreadInfo(Thread t); extern "C" JNIEXPORT jthread JNICALL Java_art_Test924_getCurrentThread( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { 
jthread thread = nullptr; jvmtiError result = jvmti_env->GetCurrentThread(&thread); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -89,7 +89,7 @@ extern "C" JNIEXPORT jthread JNICALL Java_art_Test924_getCurrentThread( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getThreadInfo( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread) { jvmtiThreadInfo info; memset(&info, 0, sizeof(jvmtiThreadInfo)); @@ -137,7 +137,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getThreadInfo( } extern "C" JNIEXPORT jint JNICALL Java_art_Test924_getThreadState( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread) { jint state; jvmtiError result = jvmti_env->GetThreadState(thread, &state); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -147,7 +147,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test924_getThreadState( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getAllThreads( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jint thread_count; jthread* threads; @@ -167,7 +167,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getAllThreads( } extern "C" JNIEXPORT jlong JNICALL Java_art_Test924_getTLS( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread) { void* tls; jvmtiError result = jvmti_env->GetThreadLocalStorage(thread, &tls); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -177,7 +177,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test924_getTLS( } extern "C" JNIEXPORT void JNICALL Java_art_Test924_setTLS( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthread thread, jlong val) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthread thread, jlong val) { const void* tls = reinterpret_cast<void*>(static_cast<uintptr_t>(val)); jvmtiError result = jvmti_env->SetThreadLocalStorage(thread, tls); JvmtiErrorToException(env, jvmti_env, result); @@ -223,7 +223,7 @@ static void JNICALL ThreadEnd(jvmtiEnv* jvmti_env, } extern "C" JNIEXPORT void JNICALL Java_art_Test924_enableThreadEvents( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jboolean b) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jboolean b) { if (b == JNI_FALSE) { jvmtiError ret = jvmti_env->SetEventNotificationMode(JVMTI_DISABLE, JVMTI_EVENT_THREAD_START, @@ -260,7 +260,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test924_enableThreadEvents( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test924_getThreadEventMessages( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { std::lock_guard<std::mutex> guard(gEventsMutex); jobjectArray ret = CreateObjectArray(env, static_cast<jint>(gEvents.size()), diff --git a/test/925-threadgroups/threadgroups.cc b/test/925-threadgroups/threadgroups.cc index cc053bcce0..915475671a 100644 --- a/test/925-threadgroups/threadgroups.cc +++ b/test/925-threadgroups/threadgroups.cc @@ -36,7 +36,7 @@ namespace Test925ThreadGroups { // private static native Object[] getThreadGroupChildren(); extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getTopThreadGroups( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jthreadGroup* groups; jint group_count; jvmtiError result = 
jvmti_env->GetTopThreadGroups(&group_count, &groups); @@ -55,7 +55,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getTopThreadGroups( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getThreadGroupInfo( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthreadGroup group) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthreadGroup group) { jvmtiThreadGroupInfo info; jvmtiError result = jvmti_env->GetThreadGroupInfo(group, &info); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -87,7 +87,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getThreadGroupInfo( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test925_getThreadGroupChildren( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jthreadGroup group) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jthreadGroup group) { jint thread_count; jthread* threads; jint threadgroup_count; diff --git a/test/927-timers/timers.cc b/test/927-timers/timers.cc index 9eaac719cd..bf20130796 100644 --- a/test/927-timers/timers.cc +++ b/test/927-timers/timers.cc @@ -32,7 +32,7 @@ namespace art { namespace Test926Timers { extern "C" JNIEXPORT jint JNICALL Java_art_Test927_getAvailableProcessors( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jint count; jvmtiError result = jvmti_env->GetAvailableProcessors(&count); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -42,7 +42,7 @@ extern "C" JNIEXPORT jint JNICALL Java_art_Test927_getAvailableProcessors( } extern "C" JNIEXPORT jlong JNICALL Java_art_Test927_getTime( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jlong time; jvmtiError result = jvmti_env->GetTime(&time); if (JvmtiErrorToException(env, jvmti_env, result)) { @@ -52,7 +52,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Test927_getTime( } extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Test927_getTimerInfo( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jvmtiTimerInfo info; jvmtiError result = jvmti_env->GetTimerInfo(&info); if (JvmtiErrorToException(env, jvmti_env, result)) { diff --git a/test/929-search/search.cc b/test/929-search/search.cc index 5516105abe..fb79c6ffb3 100644 --- a/test/929-search/search.cc +++ b/test/929-search/search.cc @@ -31,7 +31,7 @@ namespace art { namespace Test929Search { extern "C" JNIEXPORT void JNICALL Java_Main_addToBootClassLoader( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring segment) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring segment) { ScopedUtfChars utf(env, segment); if (utf.c_str() == nullptr) { return; @@ -41,7 +41,7 @@ extern "C" JNIEXPORT void JNICALL Java_Main_addToBootClassLoader( } extern "C" JNIEXPORT void JNICALL Java_Main_addToSystemClassLoader( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED, jstring segment) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass, jstring segment) { ScopedUtfChars utf(env, segment); if (utf.c_str() == nullptr) { return; diff --git a/test/931-agent-thread/agent_thread.cc b/test/931-agent-thread/agent_thread.cc index 391df4e439..16f6b274c9 100644 --- a/test/931-agent-thread/agent_thread.cc +++ b/test/931-agent-thread/agent_thread.cc @@ -90,7 +90,7 @@ static void AgentMain(jvmtiEnv* jenv, JNIEnv* env, void* arg) { } extern "C" JNIEXPORT void JNICALL Java_art_Test931_testAgentThread( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { // 
Create a Thread object. ScopedLocalRef<jobject> thread_name(env, env->NewStringUTF("Agent Thread")); if (thread_name.get() == nullptr) { diff --git a/test/933-misc-events/misc_events.cc b/test/933-misc-events/misc_events.cc index d2ae0f4cc1..2182bc059c 100644 --- a/test/933-misc-events/misc_events.cc +++ b/test/933-misc-events/misc_events.cc @@ -33,13 +33,13 @@ namespace Test933MiscEvents { static std::atomic<bool> saw_dump_request(false); -static void DumpRequestCallback(jvmtiEnv* jenv ATTRIBUTE_UNUSED) { +static void DumpRequestCallback([[maybe_unused]] jvmtiEnv* jenv) { printf("Received dump request.\n"); saw_dump_request.store(true, std::memory_order::memory_order_relaxed); } extern "C" JNIEXPORT void JNICALL Java_art_Test933_testSigQuit( - JNIEnv* env, jclass Main_klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass Main_klass) { jvmtiEventCallbacks callbacks; memset(&callbacks, 0, sizeof(jvmtiEventCallbacks)); callbacks.DataDumpRequest = DumpRequestCallback; diff --git a/test/936-search-onload/search_onload.cc b/test/936-search-onload/search_onload.cc index 23cea83be6..d69341393d 100644 --- a/test/936-search-onload/search_onload.cc +++ b/test/936-search-onload/search_onload.cc @@ -34,8 +34,8 @@ namespace art { namespace Test936SearchOnload { jint OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) { printf("Unable to get jvmti env!\n"); return 1; diff --git a/test/945-obsolete-native/obsolete_native.cc b/test/945-obsolete-native/obsolete_native.cc index 418ce90637..29c7a808d4 100644 --- a/test/945-obsolete-native/obsolete_native.cc +++ b/test/945-obsolete-native/obsolete_native.cc @@ -32,7 +32,7 @@ namespace art { namespace Test945ObsoleteNative { extern "C" JNIEXPORT void JNICALL Java_art_Test945_00024Transform_doExecute( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject runnable) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject runnable) { jclass runnable_klass = env->FindClass("java/lang/Runnable"); jmethodID run_method = env->GetMethodID(runnable_klass, "run", "()V"); env->CallVoidMethod(runnable, run_method); diff --git a/test/980-redefine-object/redef_object.cc b/test/980-redefine-object/redef_object.cc index a8393dc1b3..76077183e1 100644 --- a/test/980-redefine-object/redef_object.cc +++ b/test/980-redefine-object/redef_object.cc @@ -38,10 +38,10 @@ namespace Test980RedefineObject { static void JNICALL RedefineObjectHook(jvmtiEnv *jvmti_env, JNIEnv* env, - jclass class_being_redefined ATTRIBUTE_UNUSED, - jobject loader ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass class_being_redefined, + [[maybe_unused]] jobject loader, const char* name, - jobject protection_domain ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject protection_domain, jint class_data_len, const unsigned char* class_data, jint* new_class_data_len, @@ -106,7 +106,7 @@ static void JNICALL RedefineObjectHook(jvmtiEnv *jvmti_env, } extern "C" JNIEXPORT void JNICALL Java_Main_addMemoryTrackingCall(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jclass obj_class, jthread thr) { jvmtiCapabilities caps {.can_retransform_classes = 1}; diff --git a/test/983-source-transform-verify/source_transform.cc b/test/983-source-transform-verify/source_transform.cc index 9e65a9964c..e778dbb65c 100644 --- a/test/983-source-transform-verify/source_transform.cc +++ 
b/test/983-source-transform-verify/source_transform.cc @@ -41,16 +41,16 @@ static void Println(JNIEnv* env, const char* msg) { } // The hook we are using. -void JNICALL CheckDexFileHook(jvmtiEnv* jvmti_env ATTRIBUTE_UNUSED, +void JNICALL CheckDexFileHook([[maybe_unused]] jvmtiEnv* jvmti_env, JNIEnv* env, jclass class_being_redefined, - jobject loader ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject loader, const char* name, - jobject protection_domain ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject protection_domain, jint class_data_len, const unsigned char* class_data, - jint* new_class_data_len ATTRIBUTE_UNUSED, - unsigned char** new_class_data ATTRIBUTE_UNUSED) { + [[maybe_unused]] jint* new_class_data_len, + [[maybe_unused]] unsigned char** new_class_data) { if (kSkipInitialLoad && class_being_redefined == nullptr) { // Something got loaded concurrently. Just ignore it for now. To make sure the test is // repeatable we only care about things that come from RetransformClasses. diff --git a/test/986-native-method-bind/native_bind.cc b/test/986-native-method-bind/native_bind.cc index 34e1f3539b..abb767c3b5 100644 --- a/test/986-native-method-bind/native_bind.cc +++ b/test/986-native-method-bind/native_bind.cc @@ -40,22 +40,22 @@ static void doUpPrintCall(JNIEnv* env, const char* function) { } extern "C" JNIEXPORT void JNICALL Java_art_Test986_00024Transform_sayHi__( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { doUpPrintCall(env, "doSayHi"); } extern "C" JNIEXPORT void JNICALL Java_art_Test986_00024Transform_sayHi2( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { doUpPrintCall(env, "doSayHi2"); } -extern "C" JNIEXPORT void JNICALL NoReallySayGoodbye(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL NoReallySayGoodbye(JNIEnv* env, [[maybe_unused]] jclass klass) { doUpPrintCall(env, "doSayBye"); } -static void doJvmtiMethodBind(jvmtiEnv* jvmtienv ATTRIBUTE_UNUSED, +static void doJvmtiMethodBind([[maybe_unused]] jvmtiEnv* jvmtienv, JNIEnv* env, - jthread thread ATTRIBUTE_UNUSED, + [[maybe_unused]] jthread thread, jmethodID m, void* address, /*out*/void** out_address) { @@ -95,7 +95,7 @@ static void doJvmtiMethodBind(jvmtiEnv* jvmtienv ATTRIBUTE_UNUSED, } extern "C" JNIEXPORT void JNICALL Java_art_Test986_setupNativeBindNotify( - JNIEnv* env ATTRIBUTE_UNUSED, jclass klass ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, [[maybe_unused]] jclass klass) { jvmtiEventCallbacks cb; memset(&cb, 0, sizeof(cb)); cb.NativeMethodBind = doJvmtiMethodBind; @@ -103,7 +103,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test986_setupNativeBindNotify( } extern "C" JNIEXPORT void JNICALL Java_art_Test986_setNativeBindNotify( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jboolean enable) { + JNIEnv* env, [[maybe_unused]] jclass klass, jboolean enable) { jvmtiError res = jvmti_env->SetEventNotificationMode(enable ? 
JVMTI_ENABLE : JVMTI_DISABLE, JVMTI_EVENT_NATIVE_METHOD_BIND, nullptr); @@ -113,7 +113,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Test986_setNativeBindNotify( } extern "C" JNIEXPORT void JNICALL Java_art_Test986_rebindTransformClass( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass k) { + JNIEnv* env, [[maybe_unused]] jclass klass, jclass k) { JNINativeMethod m[2]; m[0].name = "sayHi"; m[0].signature = "()V"; diff --git a/test/987-agent-bind/agent_bind.cc b/test/987-agent-bind/agent_bind.cc index 7dbdd8e29f..51fd74e2cf 100644 --- a/test/987-agent-bind/agent_bind.cc +++ b/test/987-agent-bind/agent_bind.cc @@ -40,12 +40,12 @@ static void doUpPrintCall(JNIEnv* env, const char* function) { } extern "C" JNIEXPORT void JNICALL Java_art_Test987_00024Transform_sayHi__( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { doUpPrintCall(env, "doSayHi"); } extern "C" JNIEXPORT void JNICALL Java_art_Test987_00024Transform_sayHi2( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass klass) { doUpPrintCall(env, "doSayHi2"); } diff --git a/test/989-method-trace-throw/method_trace.cc b/test/989-method-trace-throw/method_trace.cc index 019b6a9a24..edfff907a0 100644 --- a/test/989-method-trace-throw/method_trace.cc +++ b/test/989-method-trace-throw/method_trace.cc @@ -52,8 +52,8 @@ extern "C" JNIEXPORT jobject JNICALL Java_art_Test989_returnValueNative(JNIEnv* return env->CallStaticObjectMethod(klass, targetMethod); } -extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative(JNIEnv* env ATTRIBUTE_UNUSED, - jclass klass ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Java_art_Test989_doNothingNative([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass klass) { return; } diff --git a/test/992-source-data/source_file.cc b/test/992-source-data/source_file.cc index 78687ff005..9d98c640c1 100644 --- a/test/992-source-data/source_file.cc +++ b/test/992-source-data/source_file.cc @@ -38,7 +38,7 @@ namespace Test992SourceFile { extern "C" JNIEXPORT jstring JNICALL Java_art_Test992_getSourceFileName(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jclass target) { char* file = nullptr; if (JvmtiErrorToException(env, jvmti_env, jvmti_env->GetSourceFileName(target, &file))) { @@ -51,7 +51,7 @@ jstring JNICALL Java_art_Test992_getSourceFileName(JNIEnv* env, extern "C" JNIEXPORT jstring JNICALL Java_art_Test992_getSourceDebugExtension(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jclass target) { char* ext = nullptr; if (JvmtiErrorToException(env, jvmti_env, jvmti_env->GetSourceDebugExtension(target, &ext))) { diff --git a/test/993-breakpoints-non-debuggable/onload.cc b/test/993-breakpoints-non-debuggable/onload.cc index dbbcadcbed..f7e7f9d354 100644 --- a/test/993-breakpoints-non-debuggable/onload.cc +++ b/test/993-breakpoints-non-debuggable/onload.cc @@ -67,7 +67,7 @@ static const jvmtiCapabilities limited_caps = { .can_generate_resource_exhaustion_threads_events = 0, }; -jint OnLoad(JavaVM* vm, char* options ATTRIBUTE_UNUSED, void* reserved ATTRIBUTE_UNUSED) { +jint OnLoad(JavaVM* vm, [[maybe_unused]] char* options, [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), kArtTiVersion) != 0) { printf("Unable to get jvmti env!\n"); return 1; diff --git a/test/993-breakpoints/breakpoints.cc b/test/993-breakpoints/breakpoints.cc index e9cf3b32c6..c0ee39201e 100644 --- a/test/993-breakpoints/breakpoints.cc +++ 
b/test/993-breakpoints/breakpoints.cc @@ -38,7 +38,7 @@ namespace Test993Breakpoints { extern "C" JNIEXPORT jobject JNICALL Java_art_Test993_constructNative(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject target, jclass clazz) { jmethodID method = env->FromReflectedMethod(target); @@ -50,7 +50,7 @@ jobject JNICALL Java_art_Test993_constructNative(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_Test993_invokeNativeObject(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject target, jclass clazz, jobject thizz) { @@ -67,7 +67,7 @@ void JNICALL Java_art_Test993_invokeNativeObject(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_Test993_invokeNativeBool(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject target, jclass clazz, jobject thizz) { @@ -84,7 +84,7 @@ void JNICALL Java_art_Test993_invokeNativeBool(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_Test993_invokeNativeLong(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject target, jclass clazz, jobject thizz) { @@ -101,7 +101,7 @@ void JNICALL Java_art_Test993_invokeNativeLong(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_Test993_invokeNative(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jobject target, jclass clazz, jobject thizz) { diff --git a/test/996-breakpoint-obsolete/obsolete_breakpoints.cc b/test/996-breakpoint-obsolete/obsolete_breakpoints.cc index 820af47f4c..f8f9173605 100644 --- a/test/996-breakpoint-obsolete/obsolete_breakpoints.cc +++ b/test/996-breakpoint-obsolete/obsolete_breakpoints.cc @@ -65,7 +65,7 @@ static jmethodID GetFirstObsoleteMethod(JNIEnv* env, jvmtiEnv* jvmti_env) { } extern "C" JNIEXPORT void JNICALL Java_art_Test996_setBreakpointOnObsoleteMethod( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jlong loc) { + JNIEnv* env, [[maybe_unused]] jclass k, jlong loc) { jmethodID method = GetFirstObsoleteMethod(env, jvmti_env); if (method == nullptr) { return; diff --git a/test/common/runtime_state.cc b/test/common/runtime_state.cc index 46f3828798..517eeaef96 100644 --- a/test/common/runtime_state.cc +++ b/test/common/runtime_state.cc @@ -72,7 +72,7 @@ extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasOatFile(JNIEnv* env, jclass c } extern "C" JNIEXPORT jobject JNICALL Java_Main_getCompilerFilter(JNIEnv* env, - jclass caller ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass caller, jclass cls) { ScopedObjectAccess soa(env); @@ -91,22 +91,22 @@ extern "C" JNIEXPORT jobject JNICALL Java_Main_getCompilerFilter(JNIEnv* env, // public static native boolean runtimeIsSoftFail(); -extern "C" JNIEXPORT jboolean JNICALL Java_Main_runtimeIsSoftFail(JNIEnv* env ATTRIBUTE_UNUSED, - jclass cls ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jboolean JNICALL Java_Main_runtimeIsSoftFail([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass cls) { return Runtime::Current()->IsVerificationSoftFail() ? 
JNI_TRUE : JNI_FALSE; } // public static native boolean hasImage(); -extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasImage(JNIEnv* env ATTRIBUTE_UNUSED, - jclass cls ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasImage([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass cls) { return Runtime::Current()->GetHeap()->HasBootImageSpace(); } // public static native boolean isImageDex2OatEnabled(); -extern "C" JNIEXPORT jboolean JNICALL Java_Main_isImageDex2OatEnabled(JNIEnv* env ATTRIBUTE_UNUSED, - jclass cls ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT jboolean JNICALL Java_Main_isImageDex2OatEnabled([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass cls) { return Runtime::Current()->IsImageDex2OatEnabled(); } @@ -453,14 +453,14 @@ extern "C" JNIEXPORT jboolean JNICALL Java_Main_isObsoleteObject(JNIEnv* env, jc } extern "C" JNIEXPORT void JNICALL Java_Main_forceInterpreterOnThread(JNIEnv* env, - jclass cls ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass cls) { ScopedObjectAccess soa(env); MutexLock thread_list_mu(soa.Self(), *Locks::thread_list_lock_); soa.Self()->IncrementForceInterpreterCount(); } -extern "C" JNIEXPORT void JNICALL Java_Main_setAsyncExceptionsThrown(JNIEnv* env ATTRIBUTE_UNUSED, - jclass cls ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Java_Main_setAsyncExceptionsThrown([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass cls) { Runtime::Current()->SetAsyncExceptionsThrown(); } diff --git a/test/common/stack_inspect.cc b/test/common/stack_inspect.cc index 33f31e2875..3e737fdebc 100644 --- a/test/common/stack_inspect.cc +++ b/test/common/stack_inspect.cc @@ -38,8 +38,8 @@ static bool asserts_enabled = true; // public static native void disableStackFrameAsserts(); // Note: to globally disable asserts in unsupported configurations. -extern "C" JNIEXPORT void JNICALL Java_Main_disableStackFrameAsserts(JNIEnv* env ATTRIBUTE_UNUSED, - jclass cls ATTRIBUTE_UNUSED) { +extern "C" JNIEXPORT void JNICALL Java_Main_disableStackFrameAsserts([[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass cls) { asserts_enabled = false; } @@ -98,7 +98,7 @@ static bool IsMethodInterpreted(Thread* self, // TODO Remove 'require_deoptimizable' option once we have deoptimization through runtime frames. extern "C" JNIEXPORT jboolean JNICALL Java_Main_isInterpretedFunction( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method, jboolean require_deoptimizable) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method, jboolean require_deoptimizable) { // Return false if this seems to not be an ART runtime. if (Runtime::Current() == nullptr) { return JNI_FALSE; @@ -185,7 +185,7 @@ extern "C" JNIEXPORT void JNICALL Java_Main_assertCallerIsManaged(JNIEnv* env, j } extern "C" JNIEXPORT jobject JNICALL Java_Main_getThisOfCaller( - JNIEnv* env, jclass cls ATTRIBUTE_UNUSED) { + JNIEnv* env, [[maybe_unused]] jclass cls) { ScopedObjectAccess soa(env); std::unique_ptr<art::Context> context(art::Context::Create()); jobject result = nullptr; diff --git a/test/ti-agent/agent_startup.cc b/test/ti-agent/agent_startup.cc index d6fd266334..5ebc78ae94 100644 --- a/test/ti-agent/agent_startup.cc +++ b/test/ti-agent/agent_startup.cc @@ -26,13 +26,13 @@ namespace art { // Utility functions for binding jni methods. 
extern "C" JNIEXPORT void JNICALL Java_art_Main_bindAgentJNI( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jstring className, jobject classLoader) { + JNIEnv* env, [[maybe_unused]] jclass klass, jstring className, jobject classLoader) { ScopedUtfChars name(env, className); BindFunctions(jvmti_env, env, name.c_str(), classLoader); } extern "C" JNIEXPORT void JNICALL Java_art_Main_bindAgentJNIForClass( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jclass bindClass) { + JNIEnv* env, [[maybe_unused]] jclass klass, jclass bindClass) { BindFunctionsOnClass(jvmti_env, env, bindClass); } diff --git a/test/ti-agent/breakpoint_helper.cc b/test/ti-agent/breakpoint_helper.cc index 83ba0a6342..19134cecb5 100644 --- a/test/ti-agent/breakpoint_helper.cc +++ b/test/ti-agent/breakpoint_helper.cc @@ -60,7 +60,7 @@ extern "C" void breakpointCB(jvmtiEnv* jvmti, extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Breakpoint_getLineNumberTableNative( JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, jobject target) { jmethodID method = env->FromReflectedMethod(target); if (env->ExceptionCheck()) { @@ -107,7 +107,7 @@ extern "C" JNIEXPORT jobjectArray JNICALL Java_art_Breakpoint_getLineNumberTable } extern "C" JNIEXPORT jlong JNICALL Java_art_Breakpoint_getStartLocation(JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, jobject target) { jmethodID method = env->FromReflectedMethod(target); if (env->ExceptionCheck()) { @@ -120,7 +120,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Breakpoint_getStartLocation(JNIEnv* } extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_clearBreakpoint(JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, jobject target, jlocation location) { jmethodID method = env->FromReflectedMethod(target); @@ -131,7 +131,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_clearBreakpoint(JNIEnv* en } extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_setBreakpoint(JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, jobject target, jlocation location) { jmethodID method = env->FromReflectedMethod(target); @@ -143,7 +143,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_setBreakpoint(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_startBreakpointWatch( JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, jclass method_klass, jobject method, jboolean allow_recursive, @@ -190,7 +190,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_startBreakpointWatch( extern "C" JNIEXPORT void JNICALL Java_art_Breakpoint_stopBreakpointWatch( JNIEnv* env, - jclass k ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass k, jthread thr) { if (JvmtiErrorToException(env, jvmti_env, jvmti_env->SetEventNotificationMode(JVMTI_DISABLE, diff --git a/test/ti-agent/common_load.cc b/test/ti-agent/common_load.cc index ff8b3a85a8..1f35aa0b34 100644 --- a/test/ti-agent/common_load.cc +++ b/test/ti-agent/common_load.cc @@ -58,8 +58,8 @@ struct AgentLib { // A trivial OnLoad implementation that only initializes the global jvmti_env. 
static jint MinimalOnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0) != 0) { printf("Unable to get jvmti env!\n"); return 1; diff --git a/test/ti-agent/early_return_helper.cc b/test/ti-agent/early_return_helper.cc index e4aa5d0961..df2703e680 100644 --- a/test/ti-agent/early_return_helper.cc +++ b/test/ti-agent/early_return_helper.cc @@ -27,37 +27,37 @@ namespace art { namespace common_early_return { extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_popFrame( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->PopFrame(thr)); } extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnFloat( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jfloat val) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jfloat val) { JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnFloat(thr, val)); } extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnDouble( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jdouble val) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jdouble val) { JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnDouble(thr, val)); } extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnLong( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jlong val) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jlong val) { JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnLong(thr, val)); } extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnInt( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jint val) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jint val) { JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnInt(thr, val)); } extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnVoid( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnVoid(thr)); } extern "C" JNIEXPORT void JNICALL Java_art_NonStandardExit_forceEarlyReturnObject( - JNIEnv* env, jclass k ATTRIBUTE_UNUSED, jthread thr, jobject val) { + JNIEnv* env, [[maybe_unused]] jclass k, jthread thr, jobject val) { JvmtiErrorToException(env, jvmti_env, jvmti_env->ForceEarlyReturnObject(thr, val)); } diff --git a/test/ti-agent/exceptions_helper.cc b/test/ti-agent/exceptions_helper.cc index e56c39b9eb..6095c2eeb6 100644 --- a/test/ti-agent/exceptions_helper.cc +++ b/test/ti-agent/exceptions_helper.cc @@ -107,7 +107,7 @@ static void exceptionCatchCB(jvmtiEnv* jvmti, extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_setupExceptionTracing( JNIEnv* env, - jclass exception ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass exception, jclass klass, jclass except, jobject exception_event, @@ -158,7 +158,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_setupExceptionTracing( } extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_enableExceptionCatchEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, @@ -167,7 +167,7 @@ extern "C" JNIEXPORT void 
JNICALL Java_art_Exceptions_enableExceptionCatchEvent( } extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_enableExceptionEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, @@ -176,7 +176,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_enableExceptionEvent( } extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_disableExceptionCatchEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->SetEventNotificationMode(JVMTI_DISABLE, @@ -185,7 +185,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_disableExceptionCatchEvent } extern "C" JNIEXPORT void JNICALL Java_art_Exceptions_disableExceptionEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { JvmtiErrorToException(env, jvmti_env, jvmti_env->SetEventNotificationMode(JVMTI_DISABLE, diff --git a/test/ti-agent/frame_pop_helper.cc b/test/ti-agent/frame_pop_helper.cc index f39e1854bc..45f3a063e1 100644 --- a/test/ti-agent/frame_pop_helper.cc +++ b/test/ti-agent/frame_pop_helper.cc @@ -34,7 +34,7 @@ struct FramePopData { static void framePopCB(jvmtiEnv* jvmti, JNIEnv* jnienv, jthread thr, - jmethodID method ATTRIBUTE_UNUSED, + [[maybe_unused]] jmethodID method, jboolean was_popped_by_exception) { FramePopData* data = nullptr; if (JvmtiErrorToException(jnienv, jvmti, diff --git a/test/ti-agent/redefinition_helper.cc b/test/ti-agent/redefinition_helper.cc index 706531ef31..c7028633e7 100644 --- a/test/ti-agent/redefinition_helper.cc +++ b/test/ti-agent/redefinition_helper.cc @@ -259,8 +259,8 @@ extern "C" JNIEXPORT void JNICALL Java_art_Redefinition_doCommonMultiClassRedefi // Get all capabilities except those related to retransformation. jint OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) { printf("Unable to get jvmti env!\n"); return 1; @@ -322,13 +322,13 @@ extern "C" JNIEXPORT void JNICALL Java_art_Redefinition_addCommonTransformationR // The hook we are using. void JNICALL CommonClassFileLoadHookRetransformable(jvmtiEnv* jvmti_env, - JNIEnv* jni_env ATTRIBUTE_UNUSED, - jclass class_being_redefined ATTRIBUTE_UNUSED, - jobject loader ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jni_env, + [[maybe_unused]] jclass class_being_redefined, + [[maybe_unused]] jobject loader, const char* name, - jobject protection_domain ATTRIBUTE_UNUSED, - jint class_data_len ATTRIBUTE_UNUSED, - const unsigned char* class_dat ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject protection_domain, + [[maybe_unused]] jint class_data_len, + [[maybe_unused]] const unsigned char* class_dat, jint* new_class_data_len, unsigned char** new_class_data) { std::string name_str(name); @@ -435,8 +435,8 @@ extern "C" JNIEXPORT void JNICALL Java_art_Redefinition_doCommonClassRetransform // Get all capabilities except those related to retransformation. 
jint OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) { printf("Unable to get jvmti env!\n"); return 1; @@ -451,8 +451,8 @@ namespace common_transform { // Get all capabilities except those related to retransformation. jint OnLoad(JavaVM* vm, - char* options ATTRIBUTE_UNUSED, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { if (vm->GetEnv(reinterpret_cast<void**>(&jvmti_env), JVMTI_VERSION_1_0)) { printf("Unable to get jvmti env!\n"); return 1; diff --git a/test/ti-agent/suspend_event_helper.cc b/test/ti-agent/suspend_event_helper.cc index cbc54d4c6d..71b8681a41 100644 --- a/test/ti-agent/suspend_event_helper.cc +++ b/test/ti-agent/suspend_event_helper.cc @@ -113,8 +113,8 @@ void JNICALL cbExceptionCatch(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, jmethodID method, - jlocation location ATTRIBUTE_UNUSED, - jobject exception ATTRIBUTE_UNUSED) { + [[maybe_unused]] jlocation location, + [[maybe_unused]] jobject exception) { TestData* data; if (JvmtiErrorToException( env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -131,10 +131,10 @@ void JNICALL cbException(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, jmethodID method, - jlocation location ATTRIBUTE_UNUSED, - jobject exception ATTRIBUTE_UNUSED, - jmethodID catch_method ATTRIBUTE_UNUSED, - jlocation catch_location ATTRIBUTE_UNUSED) { + [[maybe_unused]] jlocation location, + [[maybe_unused]] jobject exception, + [[maybe_unused]] jmethodID catch_method, + [[maybe_unused]] jlocation catch_location) { TestData* data; if (JvmtiErrorToException( env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -164,8 +164,8 @@ void JNICALL cbMethodExit(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, jmethodID method, - jboolean was_popped_by_exception ATTRIBUTE_UNUSED, - jvalue return_value ATTRIBUTE_UNUSED) { + [[maybe_unused]] jboolean was_popped_by_exception, + [[maybe_unused]] jvalue return_value) { TestData* data; if (JvmtiErrorToException( env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -181,13 +181,13 @@ void JNICALL cbMethodExit(jvmtiEnv* jvmti, void JNICALL cbFieldModification(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, - jmethodID method ATTRIBUTE_UNUSED, - jlocation location ATTRIBUTE_UNUSED, - jclass field_klass ATTRIBUTE_UNUSED, - jobject object ATTRIBUTE_UNUSED, + [[maybe_unused]] jmethodID method, + [[maybe_unused]] jlocation location, + [[maybe_unused]] jclass field_klass, + [[maybe_unused]] jobject object, jfieldID field, - char signature_type ATTRIBUTE_UNUSED, - jvalue new_value ATTRIBUTE_UNUSED) { + [[maybe_unused]] char signature_type, + [[maybe_unused]] jvalue new_value) { TestData* data; if (JvmtiErrorToException( env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -204,10 +204,10 @@ void JNICALL cbFieldModification(jvmtiEnv* jvmti, void JNICALL cbFieldAccess(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, - jmethodID method ATTRIBUTE_UNUSED, - jlocation location ATTRIBUTE_UNUSED, + [[maybe_unused]] jmethodID method, + [[maybe_unused]] jlocation location, jclass field_klass, - jobject object ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject object, jfieldID field) { TestData* data; if (JvmtiErrorToException( @@ -247,8 +247,8 @@ cbBreakpointHit(jvmtiEnv* jvmti, JNIEnv* env, jthread 
thr, jmethodID method, jlo void JNICALL cbFramePop(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, - jmethodID method ATTRIBUTE_UNUSED, - jboolean was_popped_by_exception ATTRIBUTE_UNUSED) { + [[maybe_unused]] jmethodID method, + [[maybe_unused]] jboolean was_popped_by_exception) { TestData* data; if (JvmtiErrorToException( env, jvmti, jvmti->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -281,7 +281,7 @@ void JNICALL cbClassLoadOrPrepare(jvmtiEnv* jvmti, JNIEnv* env, jthread thr, jcl } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupTest(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED) { + [[maybe_unused]] jclass klass) { jvmtiCapabilities caps; memset(&caps, 0, sizeof(caps)); // Most of these will already be there but might as well be complete. @@ -374,7 +374,7 @@ static TestData* SetupTestData(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendClassEvent(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jint event_num, jobjectArray interesting_names, jthread thr) { @@ -409,7 +409,7 @@ Java_art_SuspendEvents_setupSuspendClassEvent(JNIEnv* env, } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendClassEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -432,7 +432,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendClassEvent( } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendSingleStepAt( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject meth, jlocation loc, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject meth, jlocation loc, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -453,7 +453,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendSingleStepA } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendSingleStepFor( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -470,7 +470,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendSingleStepF } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendPopFrameEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jint offset, jobject breakpoint_func, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jint offset, jobject breakpoint_func, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -501,7 +501,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendPopFrameEve } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendPopFrameEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass , jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -528,7 +528,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendPopFrameEve } extern "C" JNIEXPORT void JNICALL 
Java_art_SuspendEvents_setupSuspendBreakpointFor( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject meth, jlocation loc, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject meth, jlocation loc, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -553,7 +553,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendBreakpointF } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendBreakpointFor( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -577,7 +577,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendBreakpointF } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendExceptionEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method, jboolean is_catch, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method, jboolean is_catch, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -599,7 +599,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendExceptionEv } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendExceptionEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -622,7 +622,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendExceptionEv } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendMethodEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jobject method, jboolean enter, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jobject method, jboolean enter, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -644,7 +644,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupSuspendMethodEvent } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendMethodEvent( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -668,7 +668,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearSuspendMethodEvent extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupFieldSuspendFor(JNIEnv* env, - jclass klass ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass klass, jclass target_klass, jobject field, jboolean access, @@ -706,7 +706,7 @@ Java_art_SuspendEvents_setupFieldSuspendFor(JNIEnv* env, } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearFieldSuspendFor( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -744,7 +744,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearFieldSuspendFor( } extern "C"
JNIEXPORT void JNICALL Java_art_SuspendEvents_setupWaitForNativeCall( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -761,7 +761,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_setupWaitForNativeCall( } extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearWaitForNativeCall( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { @@ -775,7 +775,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_SuspendEvents_clearWaitForNativeCall( } extern "C" JNIEXPORT void JNICALL -Java_art_SuspendEvents_waitForSuspendHit(JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { +Java_art_SuspendEvents_waitForSuspendHit(JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TestData* data; if (JvmtiErrorToException( env, jvmti_env, jvmti_env->GetThreadLocalStorage(thr, reinterpret_cast<void**>(&data)))) { diff --git a/test/ti-agent/ti_utf.h b/test/ti-agent/ti_utf.h index 15fe22ce5a..cfde098bb9 100644 --- a/test/ti-agent/ti_utf.h +++ b/test/ti-agent/ti_utf.h @@ -179,7 +179,7 @@ inline void ConvertUtf16ToModifiedUtf8(char* utf8_out, inline size_t CountModifiedUtf8BytesInUtf16(const uint16_t* chars, size_t char_count) { // FIXME: We should not emit 4-byte sequences. Bug: 192935764 size_t result = 0; - auto append = [&](char c ATTRIBUTE_UNUSED) { ++result; }; + auto append = [&]([[maybe_unused]] char c) { ++result; }; ConvertUtf16ToUtf8</*kUseShortZero=*/ false, /*kUse4ByteSequence=*/ true, /*kReplaceBadSurrogates=*/ false>(chars, char_count, append); diff --git a/test/ti-agent/trace_helper.cc b/test/ti-agent/trace_helper.cc index 11e1c15757..58958cb268 100644 --- a/test/ti-agent/trace_helper.cc +++ b/test/ti-agent/trace_helper.cc @@ -303,7 +303,7 @@ static void methodEntryCB(jvmtiEnv* jvmti, static void classPrepareCB(jvmtiEnv* jvmti, JNIEnv* jnienv, - jthread thr ATTRIBUTE_UNUSED, + [[maybe_unused]] jthread thr, jclass klass) { TraceData* data = nullptr; if (JvmtiErrorToException(jnienv, jvmti, @@ -441,7 +441,7 @@ static bool GetFieldAndClass(JNIEnv* env, extern "C" JNIEXPORT void JNICALL Java_art_Trace_watchFieldModification( JNIEnv* env, - jclass trace ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass trace, jobject field_obj) { jfieldID field; jclass klass; @@ -455,7 +455,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Trace_watchFieldModification( extern "C" JNIEXPORT void JNICALL Java_art_Trace_watchFieldAccess( JNIEnv* env, - jclass trace ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass trace, jobject field_obj) { jfieldID field; jclass klass; @@ -468,7 +468,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Trace_watchFieldAccess( extern "C" JNIEXPORT void JNICALL Java_art_Trace_enableTracing2( JNIEnv* env, - jclass trace ATTRIBUTE_UNUSED, + [[maybe_unused]] jclass trace, jclass klass, jobject enter, jobject exit, @@ -610,7 +610,7 @@ extern "C" JNIEXPORT void JNICALL Java_art_Trace_enableTracing( } extern "C" JNIEXPORT void JNICALL Java_art_Trace_disableTracing( - JNIEnv* env, jclass klass ATTRIBUTE_UNUSED, jthread thr) { + JNIEnv* env, [[maybe_unused]] jclass klass, jthread thr) { TraceData* data = nullptr; if (JvmtiErrorToException( env, jvmti_env,
jvmti_env->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&data)))) { diff --git a/test/ti-stress/stress.cc b/test/ti-stress/stress.cc index cd7af1064d..4ba4564247 100644 --- a/test/ti-stress/stress.cc +++ b/test/ti-stress/stress.cc @@ -611,11 +611,11 @@ void JNICALL SingleStepHook(jvmtiEnv* jvmtienv, // The hook we are using. void JNICALL ClassFileLoadHookSecretNoOp(jvmtiEnv* jvmti, - JNIEnv* jni_env ATTRIBUTE_UNUSED, - jclass class_being_redefined ATTRIBUTE_UNUSED, - jobject loader ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jni_env, + [[maybe_unused]] jclass class_being_redefined, + [[maybe_unused]] jobject loader, const char* name, - jobject protection_domain ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject protection_domain, jint class_data_len, const unsigned char* class_data, jint* new_class_data_len, @@ -679,7 +679,7 @@ static void ReadOptions(StressData* data, char* options) { // Do final setup during the VMInit callback. By this time most things are all setup. static void JNICALL PerformFinalSetupVMInit(jvmtiEnv *jvmti_env, JNIEnv* jni_env, - jthread thread ATTRIBUTE_UNUSED) { + [[maybe_unused]] jthread thread) { // Load the VMClassLoader class. We will get a ClassNotFound exception because we don't have // visibility but the class will be loaded behind the scenes. LOG(INFO) << "manual load & initialization of class java/lang/VMClassLoader!"; @@ -754,7 +754,7 @@ static bool WatchAllFields(JavaVM* vm, jvmtiEnv* jvmti) { extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* vm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { jvmtiEnv* jvmti = nullptr; if (vm->GetEnv(reinterpret_cast<void**>(&jvmti), JVMTI_VERSION_1_0)) { LOG(ERROR) << "Unable to get jvmti env."; diff --git a/tools/fuzzer/libart_verify_dex_fuzzer.cc b/tools/fuzzer/libart_verify_dex_fuzzer.cc index 8c57da304a..96a02edfac 100644 --- a/tools/fuzzer/libart_verify_dex_fuzzer.cc +++ b/tools/fuzzer/libart_verify_dex_fuzzer.cc @@ -17,7 +17,7 @@ #include "base/mem_map.h" #include "dex/dex_file_loader.h" -extern "C" int LLVMFuzzerInitialize(int* argc ATTRIBUTE_UNUSED, char*** argv ATTRIBUTE_UNUSED) { +extern "C" int LLVMFuzzerInitialize([[maybe_unused]] int* argc, [[maybe_unused]] char*** argv) { // Initialize environment. // TODO(solanes): `art::MemMap::Init` is not needed for the current DexFileLoader code path. // Consider removing it once the fuzzer stabilizes and check that it is actually not needed. 
diff --git a/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc b/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc index 2f8b68239b..25bf794869 100644 --- a/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc +++ b/tools/jvmti-agents/breakpoint-logger/breakpoint_logger.cc @@ -36,7 +36,7 @@ struct BreakpointTargets { std::vector<SingleBreakpointTarget> bps; }; -static void VMInitCB(jvmtiEnv* jvmti, JNIEnv* env, jthread thr ATTRIBUTE_UNUSED) { +static void VMInitCB(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jthread thr) { BreakpointTargets* all_targets = nullptr; jvmtiError err = jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&all_targets)); if (err != JVMTI_ERROR_NONE || all_targets == nullptr) { @@ -350,7 +350,7 @@ enum class StartType { static jint AgentStart(StartType start, JavaVM* vm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { jvmtiEnv* jvmti = nullptr; jvmtiError error = JVMTI_ERROR_NONE; { diff --git a/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc b/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc index 71a0115999..afa8d611aa 100644 --- a/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc +++ b/tools/jvmti-agents/dump-jvmti-state/dump-jvmti.cc @@ -88,7 +88,9 @@ static void CbDataDump(jvmtiEnv* jvmti) { } // namespace -static jint AgentStart(JavaVM* vm, char* options ATTRIBUTE_UNUSED, void* reserved ATTRIBUTE_UNUSED) { +static jint AgentStart(JavaVM* vm, + [[maybe_unused]] char* options, + [[maybe_unused]] void* reserved) { jvmtiEnv* jvmti = nullptr; if (SetupJvmtiEnv(vm, &jvmti) != JNI_OK) { LOG(ERROR) << "Could not get JVMTI env or ArtTiEnv!"; diff --git a/tools/jvmti-agents/enable-vlog/enablevlog.cc b/tools/jvmti-agents/enable-vlog/enablevlog.cc index 7bee0132da..d42b0ff17e 100644 --- a/tools/jvmti-agents/enable-vlog/enablevlog.cc +++ b/tools/jvmti-agents/enable-vlog/enablevlog.cc @@ -89,7 +89,7 @@ static jint SetupJvmtiEnv(JavaVM* vm, jvmtiEnv** jvmti) { } // namespace -static jint AgentStart(JavaVM* vm, char* options, void* reserved ATTRIBUTE_UNUSED) { +static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) { jvmtiEnv* jvmti = nullptr; if (SetupJvmtiEnv(vm, &jvmti) != JNI_OK) { LOG(ERROR) << "Could not get JVMTI env or ArtTiEnv!"; diff --git a/tools/jvmti-agents/field-counts/fieldcount.cc b/tools/jvmti-agents/field-counts/fieldcount.cc index 5a4b00e7a5..526d68f021 100644 --- a/tools/jvmti-agents/field-counts/fieldcount.cc +++ b/tools/jvmti-agents/field-counts/fieldcount.cc @@ -195,7 +195,7 @@ static void DataDumpRequestCb(jvmtiEnv* jvmti) { } } -static void VMDeathCb(jvmtiEnv* jvmti, JNIEnv* env ATTRIBUTE_UNUSED) { +static void VMDeathCb(jvmtiEnv* jvmti, [[maybe_unused]] JNIEnv* env) { DataDumpRequestCb(jvmti); RequestList* list = nullptr; CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&list))); @@ -211,7 +211,7 @@ static void CreateFieldList(jvmtiEnv* jvmti, JNIEnv* env, const std::string& arg CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(list)); } -static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, jobject thr ATTRIBUTE_UNUSED) { +static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jobject thr) { char* args = nullptr; CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&args))); CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(nullptr)); @@ -260,14 +260,14 @@ static jint AgentStart(JavaVM* vm, char* options, bool is_onload) { // Late attachment (e.g. 'am attach-agent'). 
extern "C" JNIEXPORT jint JNICALL Agent_OnAttach(JavaVM* vm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { return AgentStart(vm, options, /*is_onload=*/false); } // Early attachment extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* jvm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { return AgentStart(jvm, options, /*is_onload=*/true); } diff --git a/tools/jvmti-agents/field-null-percent/fieldnull.cc b/tools/jvmti-agents/field-null-percent/fieldnull.cc index 016164f136..b2cd13b686 100644 --- a/tools/jvmti-agents/field-null-percent/fieldnull.cc +++ b/tools/jvmti-agents/field-null-percent/fieldnull.cc @@ -140,7 +140,7 @@ static void DataDumpRequestCb(jvmtiEnv* jvmti) { } } -static void VMDeathCb(jvmtiEnv* jvmti, JNIEnv* env ATTRIBUTE_UNUSED) { +static void VMDeathCb(jvmtiEnv* jvmti, [[maybe_unused]] JNIEnv* env) { DataDumpRequestCb(jvmti); RequestList* list = nullptr; CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&list))); @@ -154,7 +154,7 @@ static void CreateFieldList(jvmtiEnv* jvmti, JNIEnv* env, const std::string& arg CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(list)); } -static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, jobject thr ATTRIBUTE_UNUSED) { +static void VMInitCb(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jobject thr) { char* args = nullptr; CHECK_JVMTI(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&args))); CHECK_JVMTI(jvmti->SetEnvironmentLocalStorage(nullptr)); @@ -201,16 +201,16 @@ static jint AgentStart(JavaVM* vm, char* options, bool is_onload) { } // Late attachment (e.g. 'am attach-agent'). -extern "C" JNIEXPORT jint JNICALL Agent_OnAttach(JavaVM *vm, +extern "C" JNIEXPORT jint JNICALL Agent_OnAttach(JavaVM* vm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { return AgentStart(vm, options, /*is_onload=*/false); } // Early attachment extern "C" JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* jvm, char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { return AgentStart(jvm, options, /*is_onload=*/true); } diff --git a/tools/jvmti-agents/jit-load/jitload.cc b/tools/jvmti-agents/jit-load/jitload.cc index 6ef7b67e47..f5d6ff4acb 100644 --- a/tools/jvmti-agents/jit-load/jitload.cc +++ b/tools/jvmti-agents/jit-load/jitload.cc @@ -51,8 +51,8 @@ static jthread GetJitThread() { } JNICALL void VmInitCb(jvmtiEnv* jvmti, - JNIEnv* env ATTRIBUTE_UNUSED, - jthread curthread ATTRIBUTE_UNUSED) { + [[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jthread curthread) { jthread jit_thread = GetJitThread(); if (jit_thread != nullptr) { CHECK_EQ(jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_PREPARE, jit_thread), @@ -72,8 +72,8 @@ JNICALL static void DataDumpRequestCb(jvmtiEnv* jvmti) { } JNICALL void ClassPrepareJit(jvmtiEnv* jvmti, - JNIEnv* jni_env ATTRIBUTE_UNUSED, - jthread thr ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jni_env, + [[maybe_unused]] jthread thr, jclass klass) { AgentOptions* ops; CHECK_CALL_SUCCESS(jvmti->GetEnvironmentLocalStorage(reinterpret_cast<void**>(&ops))); @@ -85,9 +85,7 @@ JNICALL void ClassPrepareJit(jvmtiEnv* jvmti, CHECK_CALL_SUCCESS(jvmti->Deallocate(reinterpret_cast<unsigned char*>(klass_name))); } -JNICALL void VMDeathCb(jvmtiEnv* jvmti, JNIEnv* env ATTRIBUTE_UNUSED) { - DataDumpRequestCb(jvmti); -} +JNICALL void VMDeathCb(jvmtiEnv* jvmti, [[maybe_unused]] JNIEnv* env) { DataDumpRequestCb(jvmti); } static jvmtiEnv* SetupJvmti(JavaVM* vm, 
const char* options) { android::base::InitLogging(/* argv= */nullptr); diff --git a/tools/jvmti-agents/list-extensions/list-extensions.cc b/tools/jvmti-agents/list-extensions/list-extensions.cc index 6d8237aeb7..cce42e489f 100644 --- a/tools/jvmti-agents/list-extensions/list-extensions.cc +++ b/tools/jvmti-agents/list-extensions/list-extensions.cc @@ -147,7 +147,7 @@ jint SetupJvmtiEnv(JavaVM* vm) { return JNI_OK; } -jint AgentStart(JavaVM* vm, char* options ATTRIBUTE_UNUSED, void* reserved ATTRIBUTE_UNUSED) { +jint AgentStart(JavaVM* vm, [[maybe_unused]] char* options, [[maybe_unused]] void* reserved) { if (SetupJvmtiEnv(vm) != JNI_OK) { LOG(ERROR) << "Could not get JVMTI env or ArtTiEnv!"; return JNI_ERR; diff --git a/tools/jvmti-agents/simple-force-redefine/forceredefine.cc b/tools/jvmti-agents/simple-force-redefine/forceredefine.cc index 34742388cb..72e5fe01af 100644 --- a/tools/jvmti-agents/simple-force-redefine/forceredefine.cc +++ b/tools/jvmti-agents/simple-force-redefine/forceredefine.cc @@ -134,11 +134,11 @@ static void Transform(const std::shared_ptr<ir::DexFile>& ir) { } static void CbClassFileLoadHook(jvmtiEnv* jvmti, - JNIEnv* env ATTRIBUTE_UNUSED, - jclass classBeingRedefined ATTRIBUTE_UNUSED, - jobject loader ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* env, + [[maybe_unused]] jclass classBeingRedefined, + [[maybe_unused]] jobject loader, const char* name, - jobject protectionDomain ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject protectionDomain, jint classDataLen, const unsigned char* classData, jint* newClassDataLen, @@ -212,7 +212,7 @@ static void RedefineClass(jvmtiEnv* jvmti, JNIEnv* env, const std::string& klass env->DeleteLocalRef(klass); } -static void AgentMain(jvmtiEnv* jvmti, JNIEnv* jni, void* arg ATTRIBUTE_UNUSED) { +static void AgentMain(jvmtiEnv* jvmti, JNIEnv* jni, [[maybe_unused]] void* arg) { AgentInfo* ai = GetAgentInfo(jvmti); std::string klass_name; jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_CLASS_FILE_LOAD_HOOK, nullptr); @@ -227,7 +227,7 @@ static void AgentMain(jvmtiEnv* jvmti, JNIEnv* jni, void* arg ATTRIBUTE_UNUSED) } } -static void CbVmInit(jvmtiEnv* jvmti, JNIEnv* env, jthread thr ATTRIBUTE_UNUSED) { +static void CbVmInit(jvmtiEnv* jvmti, JNIEnv* env, [[maybe_unused]] jthread thr) { // Create a Thread object. 
ScopedLocalRef<jobject> thread_name(env, env->NewStringUTF("Agent Thread")); if (thread_name.get() == nullptr) { @@ -263,7 +263,7 @@ static void CbVmInit(jvmtiEnv* jvmti, JNIEnv* env, jthread thr ATTRIBUTE_UNUSED) } // namespace template <bool kIsOnLoad> -static jint AgentStart(JavaVM* vm, char* options, void* reserved ATTRIBUTE_UNUSED) { +static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) { jvmtiEnv* jvmti = nullptr; if (vm->GetEnv(reinterpret_cast<void**>(&jvmti), JVMTI_VERSION_1_1) != JNI_OK || diff --git a/tools/jvmti-agents/simple-profile/simple_profile.cc b/tools/jvmti-agents/simple-profile/simple_profile.cc index 7161142839..9ea99d5c51 100644 --- a/tools/jvmti-agents/simple-profile/simple_profile.cc +++ b/tools/jvmti-agents/simple-profile/simple_profile.cc @@ -192,7 +192,7 @@ void SimpleProfileData::FinishInitialization(jvmtiEnv* jvmti, JNIEnv* env, jthre CHECK_JVMTI(jvmti->RunAgentThread( thread.get(), - [](jvmtiEnv* jvmti, JNIEnv* jni, void* unused_data ATTRIBUTE_UNUSED) { + [](jvmtiEnv* jvmti, JNIEnv* jni, [[maybe_unused]] void* unused_data) { SimpleProfileData* data = SimpleProfileData::GetProfileData(jvmti); data->RunDumpLoop(jvmti, jni); }, @@ -354,7 +354,7 @@ static void DataDumpCb(jvmtiEnv* jvmti_env) { static void MethodEntryCB(jvmtiEnv* jvmti_env, JNIEnv* env, - jthread thread ATTRIBUTE_UNUSED, + [[maybe_unused]] jthread thread, jmethodID method) { SimpleProfileData* data = SimpleProfileData::GetProfileData(jvmti_env); data->Enter(jvmti_env, env, method); @@ -418,7 +418,7 @@ static jint SetupJvmtiEnv(JavaVM* vm, jvmtiEnv** jvmti) { static jint AgentStart(StartType start, JavaVM* vm, const char* options, - void* reserved ATTRIBUTE_UNUSED) { + [[maybe_unused]] void* reserved) { if (options == nullptr) { options = ""; } @@ -476,7 +476,7 @@ static jint AgentStart(StartType start, callbacks.VMInit = &VMInitCB; callbacks.DataDumpRequest = &DataDumpCb; callbacks.VMDeath = &VMDeathCB; - callbacks.ThreadEnd = [](jvmtiEnv* env, JNIEnv* jni, jthread thr ATTRIBUTE_UNUSED) { + callbacks.ThreadEnd = [](jvmtiEnv* env, JNIEnv* jni, [[maybe_unused]] jthread thr) { VMDeathCB(env, jni); }; diff --git a/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc b/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc index d719db5af8..1b359b8fb0 100644 --- a/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc +++ b/tools/jvmti-agents/ti-alloc-sample/ti_alloc_sample.cc @@ -310,7 +310,7 @@ static int stack_depth_limit; static void JNICALL logVMObjectAlloc(jvmtiEnv* jvmti, JNIEnv* jni, jthread thread, - jobject obj ATTRIBUTE_UNUSED, + [[maybe_unused]] jobject obj, jclass klass, jlong size) { // Sample only once out of sampling_rate tries, and prevent recursive allocation tracking, @@ -407,9 +407,7 @@ static bool ProcessOptions(std::string options) { return true; } -static jint AgentStart(JavaVM* vm, - char* options, - void* reserved ATTRIBUTE_UNUSED) { +static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) { // Handle the sampling rate, depth limit, and output path, if set. if (!ProcessOptions(options)) { return JNI_ERR; diff --git a/tools/jvmti-agents/ti-fast/tifast.cc b/tools/jvmti-agents/ti-fast/tifast.cc index bb49aa1371..4c182c8b77 100644 --- a/tools/jvmti-agents/ti-fast/tifast.cc +++ b/tools/jvmti-agents/ti-fast/tifast.cc @@ -37,7 +37,8 @@ namespace { // env. static constexpr jint kArtTiVersion = JVMTI_VERSION_1_2 | 0x40000000; -template <typename ...Args> static void Unused(Args... args ATTRIBUTE_UNUSED) {} +template <typename... 
Args> +static void Unused([[maybe_unused]] Args... args) {} // jthread is a typedef of jobject so we use this to allow the templates to distinguish them. struct jthreadContainer { jthread thread; }; @@ -407,7 +408,8 @@ class LogPrinter { }; // Base case -template<> void LogPrinter::PrintRest(jvmtiEnv* jvmti ATTRIBUTE_UNUSED, JNIEnv* jni) { +template <> +void LogPrinter::PrintRest([[maybe_unused]] jvmtiEnv* jvmti, JNIEnv* jni) { if (jni == nullptr) { start_args = "jvmtiEnv*"; } else { @@ -668,9 +670,7 @@ static jint SetupJvmtiEnv(JavaVM* vm, jvmtiEnv** jvmti) { } // namespace -static jint AgentStart(JavaVM* vm, - char* options, - void* reserved ATTRIBUTE_UNUSED) { +static jint AgentStart(JavaVM* vm, char* options, [[maybe_unused]] void* reserved) { jvmtiEnv* jvmti = nullptr; jvmtiError error = JVMTI_ERROR_NONE; if (SetupJvmtiEnv(vm, &jvmti) != JNI_OK) { diff --git a/tools/jvmti-agents/titrace/titrace.cc b/tools/jvmti-agents/titrace/titrace.cc index d9fab25312..7178455590 100644 --- a/tools/jvmti-agents/titrace/titrace.cc +++ b/tools/jvmti-agents/titrace/titrace.cc @@ -207,8 +207,8 @@ struct TraceStatistics { struct EventCallbacks { static void SingleStep(jvmtiEnv* jvmti_env, - JNIEnv* jni_env ATTRIBUTE_UNUSED, - jthread thread ATTRIBUTE_UNUSED, + [[maybe_unused]] JNIEnv* jni_env, + [[maybe_unused]] jthread thread, jmethodID method, jlocation location) { TraceStatistics& stats = TraceStatistics::GetSingleton(); @@ -218,7 +218,7 @@ struct EventCallbacks { // Use "kill -SIGQUIT" to generate a data dump request. // Useful when running an android app since it doesn't go through // a normal Agent_OnUnload. - static void DataDumpRequest(jvmtiEnv* jvmti_env ATTRIBUTE_UNUSED) { + static void DataDumpRequest([[maybe_unused]] jvmtiEnv* jvmti_env) { TraceStatistics& stats = TraceStatistics::GetSingleton(); stats.Log(); } @@ -305,10 +305,9 @@ JNIEXPORT jint JNICALL Agent_OnLoad(JavaVM* jvm, // Note: This is not called for normal Android apps, // use "kill -SIGQUIT" instead to generate a data dump request. 
-JNIEXPORT void JNICALL Agent_OnUnload(JavaVM* vm ATTRIBUTE_UNUSED) { +JNIEXPORT void JNICALL Agent_OnUnload([[maybe_unused]] JavaVM* vm) { using namespace titrace; // NOLINT [build/namespaces] [5] LOG(INFO) << "Agent_OnUnload: Goodbye"; TraceStatistics::GetSingleton().Log(); } - diff --git a/tools/signal_dumper/signal_dumper.cc b/tools/signal_dumper/signal_dumper.cc index bedb8dc6fc..ebbe6ad674 100644 --- a/tools/signal_dumper/signal_dumper.cc +++ b/tools/signal_dumper/signal_dumper.cc @@ -657,7 +657,7 @@ void SetupAndWait(pid_t forked_pid, int signal, int timeout_exit_code) { } // namespace } // namespace art -int main(int argc ATTRIBUTE_UNUSED, char** argv) { +int main([[maybe_unused]] int argc, char** argv) { android::base::InitLogging(argv); int signal = SIGRTMIN + 2; diff --git a/tools/tracefast-plugin/tracefast.cc b/tools/tracefast-plugin/tracefast.cc index abc871d9b3..c9c135a903 100644 --- a/tools/tracefast-plugin/tracefast.cc +++ b/tools/tracefast-plugin/tracefast.cc @@ -43,73 +43,73 @@ class Tracer final : public art::instrumentation::InstrumentationListener { public: Tracer() {} - void MethodEntered(art::Thread* thread ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED) override + void MethodEntered([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::ArtMethod* method) override REQUIRES_SHARED(art::Locks::mutator_lock_) {} - void MethodExited(art::Thread* thread ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - art::instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED, - art::MutableHandle<art::mirror::Object>& return_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void MethodExited(art::Thread* thread ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - art::instrumentation::OptionalFrame frame ATTRIBUTE_UNUSED, - art::JValue& return_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void MethodUnwind(art::Thread* thread ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void DexPcMoved(art::Thread* thread ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t new_dex_pc ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void FieldRead(art::Thread* thread ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - art::ArtField* field ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void FieldWritten(art::Thread* thread ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - art::ArtField* field ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Object> field_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void FieldWritten(art::Thread* thread ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - art::ArtField* field ATTRIBUTE_UNUSED, - const art::JValue& field_value ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void ExceptionThrown(art::Thread* thread ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Throwable> exception_object 
ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void ExceptionHandled(art::Thread* self ATTRIBUTE_UNUSED, - art::Handle<art::mirror::Throwable> throwable ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void Branch(art::Thread* thread ATTRIBUTE_UNUSED, - art::ArtMethod* method ATTRIBUTE_UNUSED, - uint32_t dex_pc ATTRIBUTE_UNUSED, - int32_t dex_pc_offset ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } - - void WatchedFramePop(art::Thread* thread ATTRIBUTE_UNUSED, - const art::ShadowFrame& frame ATTRIBUTE_UNUSED) - override REQUIRES_SHARED(art::Locks::mutator_lock_) { } + void MethodExited([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] art::instrumentation::OptionalFrame frame, + [[maybe_unused]] art::MutableHandle<art::mirror::Object>& return_value) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void MethodExited([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] art::instrumentation::OptionalFrame frame, + [[maybe_unused]] art::JValue& return_value) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void MethodUnwind([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void DexPcMoved([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::Handle<art::mirror::Object> this_object, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t new_dex_pc) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void FieldRead([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::Handle<art::mirror::Object> this_object, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] art::ArtField* field) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void FieldWritten([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::Handle<art::mirror::Object> this_object, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] art::ArtField* field, + [[maybe_unused]] art::Handle<art::mirror::Object> field_value) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void FieldWritten([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::Handle<art::mirror::Object> this_object, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] art::ArtField* field, + [[maybe_unused]] const art::JValue& field_value) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void ExceptionThrown([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::Handle<art::mirror::Throwable> exception_object) + override REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void ExceptionHandled([[maybe_unused]] art::Thread* self, + [[maybe_unused]] art::Handle<art::mirror::Throwable> throwable) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void Branch([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] art::ArtMethod* method, + [[maybe_unused]] uint32_t dex_pc, + [[maybe_unused]] int32_t dex_pc_offset) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} + + void WatchedFramePop([[maybe_unused]] art::Thread* thread, + [[maybe_unused]] const art::ShadowFrame& frame) override + REQUIRES_SHARED(art::Locks::mutator_lock_) {} private: DISALLOW_COPY_AND_ASSIGN(Tracer); diff --git 
a/tools/veridex/flow_analysis.cc b/tools/veridex/flow_analysis.cc index 6a4d3514d3..f30eb09b0f 100644 --- a/tools/veridex/flow_analysis.cc +++ b/tools/veridex/flow_analysis.cc @@ -760,7 +760,7 @@ RegisterValue FlowAnalysisCollector::AnalyzeInvoke(const Instruction& instructio } } -void FlowAnalysisCollector::AnalyzeFieldSet(const Instruction& instruction ATTRIBUTE_UNUSED) { +void FlowAnalysisCollector::AnalyzeFieldSet([[maybe_unused]] const Instruction& instruction) { // There are no fields that escape reflection uses. } @@ -792,7 +792,7 @@ RegisterValue FlowAnalysisSubstitutor::AnalyzeInvoke(const Instruction& instruct return GetReturnType(id); } -void FlowAnalysisSubstitutor::AnalyzeFieldSet(const Instruction& instruction ATTRIBUTE_UNUSED) { +void FlowAnalysisSubstitutor::AnalyzeFieldSet([[maybe_unused]] const Instruction& instruction) { // TODO: analyze field sets. }
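
The change is mechanical and identical across every file above: the ART-specific ATTRIBUTE_UNUSED macro, written after the parameter name, is replaced by the standard C++17 [[maybe_unused]] attribute written in front of the declaration. As a minimal standalone sketch of the same idiom (not code from the ART tree, and assuming the old macro expanded to a compiler extension along the lines of __attribute__((unused))), the before/after looks like this:

#include <cstdio>

// Old style: a GCC/Clang-specific macro placed after the parameter name, e.g.
//   static void OnEvent(int id, void* user_data ATTRIBUTE_UNUSED);
// New style: the standard C++17 attribute placed before the declaration.
static void OnEvent(int id, [[maybe_unused]] void* user_data) {
  // user_data is deliberately ignored; [[maybe_unused]] silences
  // -Wunused-parameter without changing behavior or generated code.
  std::printf("event %d\n", id);
}

int main() {
  OnEvent(42, nullptr);
  return 0;
}

Because [[maybe_unused]] is part of the language rather than a per-compiler extension, it applies uniformly to ordinary function parameters and to lambda parameters (as in the ti_utf.h hunk above), which is what makes a tree-wide substitution like this one safe to carry out mechanically.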