Use kTieredHotnessMask in arm32 and x86 baseline backends.
These backends were missed when kTieredHotnessMask was introduced: they
incremented the baseline hotness counter without wrapping it with the
mask. Arm now masks the counter via GenerateAndConst before storing it
back (the helper is made public so the code generator can call it), and
x86 now tests the masked counter for zero instead of relying on the
carry flag set by the increment.
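
For reference, the masked update the backends now emit is equivalent to
the following sketch (the mask value and helper name are illustrative;
the real constant comes from interpreter/mterp/nterp.h):

  #include <cstdint>

  // Illustrative stand-in for interpreter::kTieredHotnessMask (assumed
  // to be a power of two minus one); not the real value.
  constexpr uint16_t kTieredHotnessMaskSketch = 0x7ff;

  // Increment the 16-bit baseline counter, wrap it with the mask and
  // store it back; a wrap to zero is the condition that triggers the
  // kQuickCompileOptimized call (cf. `j(kNotZero, &done)` on x86).
  bool UpdateBaselineHotness(uint16_t* counter) {
    *counter = static_cast<uint16_t>((*counter + 1) & kTieredHotnessMaskSketch);
    return *counter == 0;
  }
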
Test: test.py
Change-Id: If72c00f4c1c7d668e0cd360773313c3717fd9456
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index dce6528..88551e0 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -30,6 +30,7 @@
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
+#include "interpreter/mterp/nterp.h"
#include "intrinsics.h"
#include "intrinsics_arm_vixl.h"
#include "linker/linker_patch.h"
@@ -2119,6 +2120,7 @@
__ Mov(r4, address);
__ Ldrh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
__ Add(ip, ip, 1);
+ instruction_visitor_.GenerateAndConst(ip, ip, interpreter::kTieredHotnessMask);
__ Strh(ip, MemOperand(r4, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()));
if (!is_frame_entry) {
__ Pop(r4);
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index b8d20d1..f9b7c0d 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -342,6 +342,8 @@
ArmVIXLAssembler* GetAssembler() const { return assembler_; }
ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }

+ void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
+
private:
// Generate code for the given suspend check. If not null, `successor`
// is the block to branch to if the suspend check is not needed, and after
@@ -352,7 +354,6 @@
void GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
vixl::aarch32::Register temp,
vixl::aarch32::FlagsUpdate flags_update);
- void GenerateAndConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
void GenerateOrrConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
void GenerateEorConst(vixl::aarch32::Register out, vixl::aarch32::Register first, uint32_t value);
void GenerateAddLongConst(Location out, Location first, uint64_t value);
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index d3e58ed..3a9e2d4 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -26,6 +26,7 @@
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
+#include "interpreter/mterp/nterp.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "jit/profiling_info.h"
@@ -1116,7 +1117,9 @@
__ movl(EAX, Immediate(address));
__ addw(Address(EAX, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
Immediate(1));
- __ j(kCarryClear, &done);
+ __ andw(Address(EAX, ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
+ Immediate(interpreter::kTieredHotnessMask));
+ __ j(kNotZero, &done);
GenerateInvokeRuntime(
GetThreadOffset<kX86PointerSize>(kQuickCompileOptimized).Int32Value());
__ Bind(&done);