Diffstat (limited to 'compiler/optimizing')
| -rw-r--r-- | compiler/optimizing/code_generator_arm.cc    | 6 |
| -rw-r--r-- | compiler/optimizing/code_generator_arm64.cc  | 3 |
| -rw-r--r-- | compiler/optimizing/code_generator_mips.cc   | 6 |
| -rw-r--r-- | compiler/optimizing/code_generator_mips64.cc | 6 |
| -rw-r--r-- | compiler/optimizing/code_generator_x86.cc    | 6 |
| -rw-r--r-- | compiler/optimizing/code_generator_x86_64.cc | 6 |
| -rw-r--r-- | compiler/optimizing/intrinsics.h             | 2 |
7 files changed, 23 insertions, 12 deletions
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index e0106628c6..7ddd677fd0 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -59,7 +59,8 @@ static constexpr DRegister DTMP = D31;
 
 static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
 
-#define __ down_cast<ArmAssembler*>(codegen->GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<ArmAssembler*>(codegen->GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
 
 class NullCheckSlowPathARM : public SlowPathCode {
@@ -674,7 +675,8 @@ class ReadBarrierForRootSlowPathARM : public SlowPathCode {
 };
 
 #undef __
-#define __ down_cast<ArmAssembler*>(GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<ArmAssembler*>(GetAssembler())->  // NOLINT
 
 inline Condition ARMCondition(IfCondition cond) {
   switch (cond) {
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 261c04f062..362957bb31 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -132,7 +132,8 @@ Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type retur
   return ARM64ReturnLocation(return_type);
 }
 
-#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
 
 // Calculate memory accessing operand for save/restore live registers.
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index fb50680c91..c3f425ac0d 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -141,7 +141,8 @@ Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type)
   return MipsReturnLocation(type);
 }
 
-#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsWordSize, x).Int32Value()
 
 class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
@@ -478,7 +479,8 @@ CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
 }
 
 #undef __
-#define __ down_cast<MipsAssembler*>(GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<MipsAssembler*>(GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsWordSize, x).Int32Value()
 
 void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index e67d8d0dc5..bb6df500cd 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -102,7 +102,8 @@ Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type)
   return Mips64ReturnLocation(type);
 }
 
-#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, x).Int32Value()
 
 class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
@@ -424,7 +425,8 @@ CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
 }
 
 #undef __
-#define __ down_cast<Mips64Assembler*>(GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<Mips64Assembler*>(GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64DoublewordSize, x).Int32Value()
 
 void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 50892a9d48..eece18929c 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -47,7 +47,8 @@ static constexpr int kC2ConditionMask = 0x400;
 
 static constexpr int kFakeReturnRegister = Register(8);
 
-#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86WordSize, x).Int32Value()
 
 class NullCheckSlowPathX86 : public SlowPathCode {
@@ -691,7 +692,8 @@ class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 };
 
 #undef __
-#define __ down_cast<X86Assembler*>(GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT
 
 inline Condition X86Condition(IfCondition cond) {
   switch (cond) {
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 56c5b06945..cd040641a6 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -51,7 +51,8 @@ static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15
 
 static constexpr int kC2ConditionMask = 0x400;
 
-#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()
 
 class NullCheckSlowPathX86_64 : public SlowPathCode {
@@ -710,7 +711,8 @@ class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 };
 
 #undef __
-#define __ down_cast<X86_64Assembler*>(GetAssembler())->
+// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
+#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT
 
 inline Condition X86_64IntegerCondition(IfCondition cond) {
   switch (cond) {
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index 214250f337..83a512738b 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -165,7 +165,7 @@ public: \
   void Set##name() { SetBit(k##name); } \
   bool Get##name() const { return IsBitSet(k##name); } \
  private: \
-  static constexpr size_t k##name = bit + kNumberOfGenericOptimizations
+  static constexpr size_t k##name = (bit) + kNumberOfGenericOptimizations
 
 class StringEqualsOptimizations : public IntrinsicOptimizations {
  public:
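A note on the pattern being annotated: each code generator defines a file-local `__` macro that expands to an accessor for the architecture-specific assembler, so emission code reads like assembly (`__ Mov(...)`). The name `__` is technically reserved, and the macro deliberately ends with `->`, both of which plausibly contribute to the lint complaint; the commit only states that clang-tidy's warning/fix is wrong here, hence the trailing `// NOLINT`. Below is a minimal, self-contained sketch of the same pattern, assuming hypothetical `ToyAssembler` and `ToyCodeGenerator` types rather than the real ART classes.

#include <iostream>
#include <string>

// Hypothetical stand-in for an architecture-specific assembler (not an ART class).
class ToyAssembler {
 public:
  void Mov(const std::string& dst, int imm) {
    std::cout << "mov " << dst << ", #" << imm << "\n";
  }
  void Ret() { std::cout << "ret\n"; }
};

class ToyCodeGenerator {
 public:
  ToyAssembler* GetAssembler() { return &assembler_; }

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
// The macro ends with '->' so that call sites read like assembly listings.
#define __ GetAssembler()->  // NOLINT

  void GenerateReturn42() {
    __ Mov("r0", 42);  // Expands to GetAssembler()->Mov("r0", 42);
    __ Ret();
  }

#undef __  // Keep the macro's scope as small as possible.

 private:
  ToyAssembler assembler_;
};

int main() {
  ToyCodeGenerator gen;
  gen.GenerateReturn42();
  return 0;
}

In the real code generators the same define/undef bracketing appears twice per file, once for slow paths (going through `codegen->GetAssembler()`) and once for the code generator's own methods.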
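The intrinsics.h hunk is the usual macro-hygiene fix: parenthesizing the `bit` parameter so an expression argument cannot rebind under operator precedence, which is the kind of issue clang-tidy commonly flags for function-like macros. The sketch below illustrates the failure mode with hypothetical macro names and an arbitrary constant value chosen only for the example.

#include <cstddef>
#include <iostream>

constexpr size_t kNumberOfGenericOptimizations = 2;  // Value is arbitrary for this sketch.

// Without parentheses, an expression argument binds unexpectedly:
//   with bit = "1 << 1", the expansion is  1 << 1 + 2  ==  1 << 3  ==  8,
//   because '+' binds tighter than '<<'.
#define UNSAFE_SLOT(bit) (bit + kNumberOfGenericOptimizations)
// With parentheses, the argument is evaluated as a unit first.
#define SAFE_SLOT(bit) ((bit) + kNumberOfGenericOptimizations)

int main() {
  std::cout << UNSAFE_SLOT(1 << 1) << "\n";  // prints 8 -- surprising
  std::cout << SAFE_SLOT(1 << 1) << "\n";    // prints 4 -- (1 << 1) + 2
  return 0;
}

With the literal bit indices actually passed to the ART macro the unparenthesized form happens to work, so the change is defensive rather than a behavior fix.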