summary refs log tree commit diff
path: root/compiler/optimizing
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/gvn.cc                    | 2 +-
-rw-r--r--  compiler/optimizing/intrinsic_objects.cc      | 2 +-
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc       | 2 +-
-rw-r--r--  compiler/optimizing/load_store_elimination.cc | 2 +-
-rw-r--r--  compiler/optimizing/stack_map_stream.h        | 2 +-
5 files changed, 5 insertions, 5 deletions
diff --git a/compiler/optimizing/gvn.cc b/compiler/optimizing/gvn.cc
index a6ca057cfc..9113860387 100644
--- a/compiler/optimizing/gvn.cc
+++ b/compiler/optimizing/gvn.cc
@@ -248,7 +248,7 @@ class ValueSet : public ArenaObject<kArenaAllocGvn> {
// Iterates over buckets with impure instructions (even indices) and deletes
// the ones on which 'cond' returns true.
template<typename Functor>
- void DeleteAllImpureWhich(Functor cond) {
+ void DeleteAllImpureWhich(Functor&& cond) {
for (size_t i = 0; i < num_buckets_; i += 2) {
Node* node = buckets_[i];
Node* previous = nullptr;
diff --git a/compiler/optimizing/intrinsic_objects.cc b/compiler/optimizing/intrinsic_objects.cc
index 6c799d4132..c625d435ae 100644
--- a/compiler/optimizing/intrinsic_objects.cc
+++ b/compiler/optimizing/intrinsic_objects.cc
@@ -35,7 +35,7 @@ static int32_t FillIntrinsicsObjects(
ObjPtr<mirror::ObjectArray<mirror::Object>> live_objects,
int32_t expected_low,
int32_t expected_high,
- T type_check,
+ T&& type_check,
int32_t index)
REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 2ae44cd4b0..56a5186d36 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -3969,7 +3969,7 @@ template<typename OP>
void GenerateFP16Round(HInvoke* invoke,
CodeGeneratorARM64* const codegen_,
MacroAssembler* masm,
- const OP roundOp) {
+ OP&& roundOp) {
DCHECK(codegen_->GetInstructionSetFeatures().HasFP16());
LocationSummary* locations = invoke->GetLocations();
UseScratchRegisterScope scratch_scope(masm);
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 38c1cfc0d2..09e23a8552 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -2727,7 +2727,7 @@ struct ScopedRestoreHeapValues {
}
template<typename Func>
- void ForEachRecord(Func func) {
+ void ForEachRecord(Func&& func) {
for (size_t blk_id : Range(to_restore_.size())) {
for (size_t heap_loc : Range(to_restore_[blk_id].size())) {
LSEVisitor::ValueRecord* vr = &to_restore_[blk_id][heap_loc];
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 8c7b1c01a7..a3daa29b4e 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -110,7 +110,7 @@ class StackMapStream : public DeletableArenaObject<kArenaAllocStackMapStream> {
// Invokes the callback with pointer of each BitTableBuilder field.
template<typename Callback>
- void ForEachBitTable(Callback callback) {
+ void ForEachBitTable(Callback&& callback) {
size_t index = 0;
callback(index++, &stack_maps_);
callback(index++, &register_masks_);