diff options
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- | compiler/optimizing/bounds_check_elimination.cc | 33 | ||||
-rw-r--r-- | compiler/optimizing/induction_var_range.cc | 32 | ||||
-rw-r--r-- | compiler/optimizing/induction_var_range.h | 9 | ||||
-rw-r--r-- | compiler/optimizing/induction_var_range_test.cc | 12 | ||||
-rw-r--r-- | compiler/optimizing/inliner.cc | 218 | ||||
-rw-r--r-- | compiler/optimizing/inliner.h | 28 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_arm.cc | 123 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc | 109 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_mips.cc | 522 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_mips.h | 3 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_mips64.cc | 111 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_x86.cc | 114 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_x86_64.cc | 100 | ||||
-rw-r--r-- | compiler/optimizing/licm.cc | 2 | ||||
-rw-r--r-- | compiler/optimizing/load_store_elimination.cc | 3 | ||||
-rw-r--r-- | compiler/optimizing/nodes.h | 14 | ||||
-rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 65 | ||||
-rw-r--r-- | compiler/optimizing/optimizing_compiler_stats.h | 8 |
18 files changed, 1171 insertions, 335 deletions
diff --git a/compiler/optimizing/bounds_check_elimination.cc b/compiler/optimizing/bounds_check_elimination.cc index a44830207d..7dbfd7c58e 100644 --- a/compiler/optimizing/bounds_check_elimination.cc +++ b/compiler/optimizing/bounds_check_elimination.cc @@ -1228,19 +1228,26 @@ class BCEVisitor : public HGraphVisitor { InductionVarRange::Value v2; bool needs_finite_test = false; induction_range_.GetInductionRange(context, index, &v1, &v2, &needs_finite_test); - if (v1.is_known && (v1.a_constant == 0 || v1.a_constant == 1) && - v2.is_known && (v2.a_constant == 0 || v2.a_constant == 1)) { - DCHECK(v1.a_constant == 1 || v1.instruction == nullptr); - DCHECK(v2.a_constant == 1 || v2.instruction == nullptr); - ValueRange index_range(GetGraph()->GetArena(), - ValueBound(v1.instruction, v1.b_constant), - ValueBound(v2.instruction, v2.b_constant)); - // If analysis reveals a certain OOB, disable dynamic BCE. - *try_dynamic_bce = !index_range.GetLower().LessThan(array_range->GetLower()) && - !index_range.GetUpper().GreaterThan(array_range->GetUpper()); - // Use analysis for static bce only if loop is finite. - return !needs_finite_test && index_range.FitsIn(array_range); - } + do { + if (v1.is_known && (v1.a_constant == 0 || v1.a_constant == 1) && + v2.is_known && (v2.a_constant == 0 || v2.a_constant == 1)) { + DCHECK(v1.a_constant == 1 || v1.instruction == nullptr); + DCHECK(v2.a_constant == 1 || v2.instruction == nullptr); + ValueRange index_range(GetGraph()->GetArena(), + ValueBound(v1.instruction, v1.b_constant), + ValueBound(v2.instruction, v2.b_constant)); + // If analysis reveals a certain OOB, disable dynamic BCE. + if (index_range.GetLower().LessThan(array_range->GetLower()) || + index_range.GetUpper().GreaterThan(array_range->GetUpper())) { + *try_dynamic_bce = false; + return false; + } + // Use analysis for static bce only if loop is finite. 
+ if (!needs_finite_test && index_range.FitsIn(array_range)) { + return true; + } + } + } while (induction_range_.RefineOuter(&v1, &v2)); return false; } diff --git a/compiler/optimizing/induction_var_range.cc b/compiler/optimizing/induction_var_range.cc index 2ac1e152e1..9d0cde7c9f 100644 --- a/compiler/optimizing/induction_var_range.cc +++ b/compiler/optimizing/induction_var_range.cc @@ -119,6 +119,17 @@ void InductionVarRange::GetInductionRange(HInstruction* context, } } +bool InductionVarRange::RefineOuter(/*in-out*/Value* min_val, /*in-out*/Value* max_val) { + Value v1 = RefineOuter(*min_val, /* is_min */ true); + Value v2 = RefineOuter(*max_val, /* is_min */ false); + if (v1.instruction != min_val->instruction || v2.instruction != max_val->instruction) { + *min_val = v1; + *max_val = v2; + return true; + } + return false; +} + bool InductionVarRange::CanGenerateCode(HInstruction* context, HInstruction* instruction, /*out*/bool* needs_finite_test, @@ -202,6 +213,8 @@ InductionVarRange::Value InductionVarRange::GetFetch(HInstruction* instruction, } else if (IsIntAndGet(instruction->InputAt(1), &value)) { return AddValue(GetFetch(instruction->InputAt(0), trip, in_body, is_min), Value(value)); } + } else if (instruction->IsArrayLength() && instruction->InputAt(0)->IsNewArray()) { + return GetFetch(instruction->InputAt(0)->InputAt(0), trip, in_body, is_min); } else if (is_min) { // Special case for finding minimum: minimum of trip-count in loop-body is 1. if (trip != nullptr && in_body && instruction == trip->op_a->fetch) { @@ -404,6 +417,25 @@ InductionVarRange::Value InductionVarRange::MergeVal(Value v1, Value v2, bool is return Value(); } +InductionVarRange::Value InductionVarRange::RefineOuter(Value v, bool is_min) { + if (v.instruction != nullptr) { + HLoopInformation* loop = + v.instruction->GetBlock()->GetLoopInformation(); // closest enveloping loop + if (loop != nullptr) { + // Set up loop information. 
+ bool in_body = true; // use is always in body of outer loop + HInductionVarAnalysis::InductionInfo* info = + induction_analysis_->LookupInfo(loop, v.instruction); + HInductionVarAnalysis::InductionInfo* trip = + induction_analysis_->LookupInfo(loop, loop->GetHeader()->GetLastInstruction()); + // Try to refine "a x instruction + b" with outer loop range information on instruction. + return AddValue(MulValue(Value(v.a_constant), GetVal(info, trip, in_body, is_min)), + Value(v.b_constant)); + } + } + return v; +} + bool InductionVarRange::GenerateCode(HInstruction* context, HInstruction* instruction, HGraph* graph, diff --git a/compiler/optimizing/induction_var_range.h b/compiler/optimizing/induction_var_range.h index 7984871b08..71b0b1b4c3 100644 --- a/compiler/optimizing/induction_var_range.h +++ b/compiler/optimizing/induction_var_range.h @@ -68,6 +68,9 @@ class InductionVarRange { /*out*/Value* max_val, /*out*/bool* needs_finite_test); + /** Refines the values with induction of next outer loop. Returns true on change. */ + bool RefineOuter(/*in-out*/Value* min_val, /*in-out*/Value* max_val); + /** * Returns true if range analysis is able to generate code for the lower and upper * bound expressions on the instruction in the given context. The need_finite_test @@ -149,6 +152,12 @@ class InductionVarRange { static Value MergeVal(Value v1, Value v2, bool is_min); /** + * Returns refined value using induction of next outer loop or the input value if no + * further refinement is possible. + */ + Value RefineOuter(Value val, bool is_min); + + /** * Generates code for lower/upper/taken-test in the HIR. Returns true on success. * With values nullptr, the method can be used to determine if code generation * would be successful without generating actual code yet. 
diff --git a/compiler/optimizing/induction_var_range_test.cc b/compiler/optimizing/induction_var_range_test.cc index c2ba157ed8..128b5bb811 100644 --- a/compiler/optimizing/induction_var_range_test.cc +++ b/compiler/optimizing/induction_var_range_test.cc @@ -473,16 +473,19 @@ TEST_F(InductionVarRangeTest, ConstantTripCountUp) { EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(0), v1); ExpectEqual(Value(1000), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); // In context of loop-body: known. range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(0), v1); ExpectEqual(Value(999), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(1), v1); ExpectEqual(Value(1000), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); } TEST_F(InductionVarRangeTest, ConstantTripCountDown) { @@ -498,16 +501,19 @@ TEST_F(InductionVarRangeTest, ConstantTripCountDown) { EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(0), v1); ExpectEqual(Value(1000), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); // In context of loop-body: known. range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(1), v1); ExpectEqual(Value(1000), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(0), v1); ExpectEqual(Value(999), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); } TEST_F(InductionVarRangeTest, SymbolicTripCountUp) { @@ -527,16 +533,19 @@ TEST_F(InductionVarRangeTest, SymbolicTripCountUp) { EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(0), v1); ExpectEqual(Value(), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); // In context of loop-body: known. 
range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(0), v1); ExpectEqual(Value(parameter, 1, -1), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(1), v1); ExpectEqual(Value(parameter, 1, 0), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); HInstruction* lower = nullptr; HInstruction* upper = nullptr; @@ -597,16 +606,19 @@ TEST_F(InductionVarRangeTest, SymbolicTripCountDown) { EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(), v1); ExpectEqual(Value(1000), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); // In context of loop-body: known. range.GetInductionRange(increment_, condition_->InputAt(0), &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(parameter, 1, 1), v1); ExpectEqual(Value(1000), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); range.GetInductionRange(increment_, increment_, &v1, &v2, &needs_finite_test); EXPECT_FALSE(needs_finite_test); ExpectEqual(Value(parameter, 1, 0), v1); ExpectEqual(Value(999), v2); + EXPECT_FALSE(range.RefineOuter(&v1, &v2)); HInstruction* lower = nullptr; HInstruction* upper = nullptr; diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 6d93be37a7..a4dcb3aeba 100644 --- a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -171,13 +171,37 @@ static uint32_t FindMethodIndexIn(ArtMethod* method, const DexFile& dex_file, uint32_t referrer_index) SHARED_REQUIRES(Locks::mutator_lock_) { - if (method->GetDexFile()->GetLocation().compare(dex_file.GetLocation()) == 0) { + if (IsSameDexFile(*method->GetDexFile(), dex_file)) { return method->GetDexMethodIndex(); } else { return method->FindDexMethodIndexInOtherDexFile(dex_file, referrer_index); } } +static uint32_t FindClassIndexIn(mirror::Class* cls, const DexFile& dex_file) + 
SHARED_REQUIRES(Locks::mutator_lock_) { + if (cls->GetDexCache() == nullptr) { + DCHECK(cls->IsArrayClass()); + // TODO: find the class in `dex_file`. + return DexFile::kDexNoIndex; + } else if (cls->GetDexTypeIndex() == DexFile::kDexNoIndex16) { + // TODO: deal with proxy classes. + return DexFile::kDexNoIndex; + } else if (IsSameDexFile(cls->GetDexFile(), dex_file)) { + // Update the dex cache to ensure the class is in. The generated code will + // consider it is. We make it safe by updating the dex cache, as other + // dex files might also load the class, and there is no guarantee the dex + // cache of the dex file of the class will be updated. + if (cls->GetDexCache()->GetResolvedType(cls->GetDexTypeIndex()) == nullptr) { + cls->GetDexCache()->SetResolvedType(cls->GetDexTypeIndex(), cls); + } + return cls->GetDexTypeIndex(); + } else { + // TODO: find the class in `dex_file`. + return DexFile::kDexNoIndex; + } +} + bool HInliner::TryInline(HInvoke* invoke_instruction) { if (invoke_instruction->IsInvokeUnresolved()) { return false; // Don't bother to move further if we know the method is unresolved. @@ -214,53 +238,176 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) { return false; } - if (!invoke_instruction->IsInvokeStaticOrDirect()) { - resolved_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method); - if (resolved_method == nullptr) { + if (invoke_instruction->IsInvokeStaticOrDirect()) { + return TryInline(invoke_instruction, resolved_method); + } + + // Check if we can statically find the method. + ArtMethod* actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method); + if (actual_method != nullptr) { + return TryInline(invoke_instruction, actual_method); + } + + // Check if we can use an inline cache. + ArtMethod* caller = graph_->GetArtMethod(); + size_t pointer_size = class_linker->GetImagePointerSize(); + // Under JIT, we should always know the caller. 
+ DCHECK(!Runtime::Current()->UseJit() || (caller != nullptr)); + if (caller != nullptr && caller->GetProfilingInfo(pointer_size) != nullptr) { + ProfilingInfo* profiling_info = caller->GetProfilingInfo(pointer_size); + const InlineCache& ic = *profiling_info->GetInlineCache(invoke_instruction->GetDexPc()); + if (ic.IsUnitialized()) { VLOG(compiler) << "Interface or virtual call to " << PrettyMethod(method_index, caller_dex_file) - << " could not be statically determined"; + << " is not hit and not inlined"; return false; - } - // We have found a method, but we need to find where that method is for the caller's - // dex file. - method_index = FindMethodIndexIn(resolved_method, caller_dex_file, method_index); - if (method_index == DexFile::kDexNoIndex) { + } else if (ic.IsMonomorphic()) { + MaybeRecordStat(kMonomorphicCall); + return TryInlineMonomorphicCall(invoke_instruction, resolved_method, ic); + } else if (ic.IsPolymorphic()) { + MaybeRecordStat(kPolymorphicCall); + return TryInlinePolymorphicCall(invoke_instruction, resolved_method, ic); + } else { + DCHECK(ic.IsMegamorphic()); VLOG(compiler) << "Interface or virtual call to " - << PrettyMethod(resolved_method) - << " cannot be inlined because unaccessible to caller"; + << PrettyMethod(method_index, caller_dex_file) + << " is megamorphic and not inlined"; + MaybeRecordStat(kMegamorphicCall); return false; } } - bool same_dex_file = - IsSameDexFile(*outer_compilation_unit_.GetDexFile(), *resolved_method->GetDexFile()); + VLOG(compiler) << "Interface or virtual call to " + << PrettyMethod(method_index, caller_dex_file) + << " could not be statically determined"; + return false; +} - const DexFile::CodeItem* code_item = resolved_method->GetCodeItem(); +bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction, + ArtMethod* resolved_method, + const InlineCache& ic) { + const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile(); + uint32_t class_index = 
FindClassIndexIn(ic.GetMonomorphicType(), caller_dex_file); + if (class_index == DexFile::kDexNoIndex) { + VLOG(compiler) << "Call to " << PrettyMethod(resolved_method) + << " from inline cache is not inlined because its class is not" + << " accessible to the caller"; + return false; + } + + ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker(); + size_t pointer_size = class_linker->GetImagePointerSize(); + if (invoke_instruction->IsInvokeInterface()) { + resolved_method = ic.GetMonomorphicType()->FindVirtualMethodForInterface( + resolved_method, pointer_size); + } else { + DCHECK(invoke_instruction->IsInvokeVirtual()); + resolved_method = ic.GetMonomorphicType()->FindVirtualMethodForVirtual( + resolved_method, pointer_size); + } + DCHECK(resolved_method != nullptr); + HInstruction* receiver = invoke_instruction->InputAt(0); + HInstruction* cursor = invoke_instruction->GetPrevious(); + HBasicBlock* bb_cursor = invoke_instruction->GetBlock(); + + if (!TryInline(invoke_instruction, resolved_method, /* do_rtp */ false)) { + return false; + } + + // We successfully inlined, now add a guard. 
+ ArtField* field = class_linker->GetClassRoot(ClassLinker::kJavaLangObject)->GetInstanceField(0); + DCHECK_EQ(std::string(field->GetName()), "shadow$_klass_"); + HInstanceFieldGet* field_get = new (graph_->GetArena()) HInstanceFieldGet( + receiver, + Primitive::kPrimNot, + field->GetOffset(), + field->IsVolatile(), + field->GetDexFieldIndex(), + field->GetDeclaringClass()->GetDexClassDefIndex(), + *field->GetDexFile(), + handles_->NewHandle(field->GetDexCache()), + invoke_instruction->GetDexPc()); + + bool is_referrer = + (ic.GetMonomorphicType() == outermost_graph_->GetArtMethod()->GetDeclaringClass()); + HLoadClass* load_class = new (graph_->GetArena()) HLoadClass(graph_->GetCurrentMethod(), + class_index, + caller_dex_file, + is_referrer, + invoke_instruction->GetDexPc(), + /* needs_access_check */ false, + /* is_in_dex_cache */ true); + + HNotEqual* compare = new (graph_->GetArena()) HNotEqual(load_class, field_get); + HDeoptimize* deoptimize = new (graph_->GetArena()) HDeoptimize( + compare, invoke_instruction->GetDexPc()); + // TODO: Extend reference type propagation to understand the guard. + if (cursor != nullptr) { + bb_cursor->InsertInstructionAfter(load_class, cursor); + } else { + bb_cursor->InsertInstructionBefore(load_class, bb_cursor->GetFirstInstruction()); + } + bb_cursor->InsertInstructionAfter(field_get, load_class); + bb_cursor->InsertInstructionAfter(compare, field_get); + bb_cursor->InsertInstructionAfter(deoptimize, compare); + deoptimize->CopyEnvironmentFrom(invoke_instruction->GetEnvironment()); + + // Run type propagation to get the guard typed, and eventually propagate the + // type of the receiver. 
+ ReferenceTypePropagation rtp_fixup(graph_, handles_); + rtp_fixup.Run(); + + MaybeRecordStat(kInlinedMonomorphicCall); + return true; +} + +bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction ATTRIBUTE_UNUSED, + ArtMethod* resolved_method, + const InlineCache& ic ATTRIBUTE_UNUSED) { + // TODO + VLOG(compiler) << "Unimplemented polymorphic inlining for " + << PrettyMethod(resolved_method); + return false; +} + +bool HInliner::TryInline(HInvoke* invoke_instruction, ArtMethod* method, bool do_rtp) { + const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile(); + uint32_t method_index = FindMethodIndexIn( + method, caller_dex_file, invoke_instruction->GetDexMethodIndex()); + if (method_index == DexFile::kDexNoIndex) { + VLOG(compiler) << "Call to " + << PrettyMethod(method) + << " cannot be inlined because unaccessible to caller"; + return false; + } + + bool same_dex_file = IsSameDexFile(*outer_compilation_unit_.GetDexFile(), *method->GetDexFile()); + + const DexFile::CodeItem* code_item = method->GetCodeItem(); if (code_item == nullptr) { - VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file) + VLOG(compiler) << "Method " << PrettyMethod(method) << " is not inlined because it is native"; return false; } size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits(); if (code_item->insns_size_in_code_units_ > inline_max_code_units) { - VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file) + VLOG(compiler) << "Method " << PrettyMethod(method) << " is too big to inline"; return false; } if (code_item->tries_size_ != 0) { - VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file) + VLOG(compiler) << "Method " << PrettyMethod(method) << " is not inlined because of try block"; return false; } - if (!resolved_method->GetDeclaringClass()->IsVerified()) { - uint16_t class_def_idx = resolved_method->GetDeclaringClass()->GetDexClassDefIndex(); + if 
(!method->GetDeclaringClass()->IsVerified()) { + uint16_t class_def_idx = method->GetDeclaringClass()->GetDexClassDefIndex(); if (!compiler_driver_->IsMethodVerifiedWithoutFailures( - resolved_method->GetDexMethodIndex(), class_def_idx, *resolved_method->GetDexFile())) { + method->GetDexMethodIndex(), class_def_idx, *method->GetDexFile())) { VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file) << " couldn't be verified, so it cannot be inlined"; return false; @@ -277,7 +424,7 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) { return false; } - if (!TryBuildAndInline(resolved_method, invoke_instruction, same_dex_file)) { + if (!TryBuildAndInline(method, invoke_instruction, same_dex_file, do_rtp)) { return false; } @@ -288,7 +435,8 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) { bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, HInvoke* invoke_instruction, - bool same_dex_file) { + bool same_dex_file, + bool do_rtp) { ScopedObjectAccess soa(Thread::Current()); const DexFile::CodeItem* code_item = resolved_method->GetCodeItem(); const DexFile& callee_dex_file = *resolved_method->GetDexFile(); @@ -341,6 +489,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, invoke_type, graph_->IsDebuggable(), graph_->GetCurrentInstructionId()); + callee_graph->SetArtMethod(resolved_method); OptimizingCompilerStats inline_stats; HGraphBuilder builder(callee_graph, @@ -422,6 +571,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, size_t number_of_instructions_budget = kMaximumNumberOfHInstructions; if (depth_ + 1 < compiler_driver_->GetCompilerOptions().GetInlineDepthLimit()) { HInliner inliner(callee_graph, + outermost_graph_, codegen_, outer_compilation_unit_, dex_compilation_unit, @@ -533,9 +683,9 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, HNullConstant* null_constant = graph_->GetNullConstant(); if (!null_constant->GetReferenceTypeInfo().IsValid()) { ReferenceTypeInfo::TypeHandle 
obj_handle = - handles_->NewHandle(class_linker->GetClassRoot(ClassLinker::kJavaLangObject)); + handles_->NewHandle(class_linker->GetClassRoot(ClassLinker::kJavaLangObject)); null_constant->SetReferenceTypeInfo( - ReferenceTypeInfo::Create(obj_handle, false /* is_exact */)); + ReferenceTypeInfo::Create(obj_handle, false /* is_exact */)); } // Check the integrity of reference types and run another type propagation if needed. @@ -554,14 +704,16 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, return_handle, return_handle->CannotBeAssignedFromOtherTypes() /* is_exact */)); } - // If the return type is a refinement of the declared type run the type propagation again. - ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo(); - ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo(); - if (invoke_rti.IsStrictSupertypeOf(return_rti) - || (return_rti.IsExact() && !invoke_rti.IsExact()) - || !return_replacement->CanBeNull()) { - ReferenceTypePropagation rtp_fixup(graph_, handles_); - rtp_fixup.Run(); + if (do_rtp) { + // If the return type is a refinement of the declared type run the type propagation again. 
+ ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo(); + ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo(); + if (invoke_rti.IsStrictSupertypeOf(return_rti) + || (return_rti.IsExact() && !invoke_rti.IsExact()) + || !return_replacement->CanBeNull()) { + ReferenceTypePropagation rtp_fixup(graph_, handles_); + rtp_fixup.Run(); + } } } diff --git a/compiler/optimizing/inliner.h b/compiler/optimizing/inliner.h index 0f6a9453be..7b9fb73ccf 100644 --- a/compiler/optimizing/inliner.h +++ b/compiler/optimizing/inliner.h @@ -27,11 +27,13 @@ class CompilerDriver; class DexCompilationUnit; class HGraph; class HInvoke; +class InlineCache; class OptimizingCompilerStats; class HInliner : public HOptimization { public: HInliner(HGraph* outer_graph, + HGraph* outermost_graph, CodeGenerator* codegen, const DexCompilationUnit& outer_compilation_unit, const DexCompilationUnit& caller_compilation_unit, @@ -40,6 +42,7 @@ class HInliner : public HOptimization { OptimizingCompilerStats* stats, size_t depth = 0) : HOptimization(outer_graph, kInlinerPassName, stats), + outermost_graph_(outermost_graph), outer_compilation_unit_(outer_compilation_unit), caller_compilation_unit_(caller_compilation_unit), codegen_(codegen), @@ -54,10 +57,33 @@ class HInliner : public HOptimization { private: bool TryInline(HInvoke* invoke_instruction); + + // Try to inline `resolved_method` in place of `invoke_instruction`. `do_rtp` is whether + // reference type propagation can run after the inlining. + bool TryInline(HInvoke* invoke_instruction, ArtMethod* resolved_method, bool do_rtp = true) + SHARED_REQUIRES(Locks::mutator_lock_); + + // Try to inline the target of a monomorphic call. If successful, the code + // in the graph will look like: + // if (receiver.getClass() != ic.GetMonomorphicType()) deopt + // ... 
// inlined code + bool TryInlineMonomorphicCall(HInvoke* invoke_instruction, + ArtMethod* resolved_method, + const InlineCache& ic) + SHARED_REQUIRES(Locks::mutator_lock_); + + // Try to inline targets of a polymorphic call. Currently unimplemented. + bool TryInlinePolymorphicCall(HInvoke* invoke_instruction, + ArtMethod* resolved_method, + const InlineCache& ic) + SHARED_REQUIRES(Locks::mutator_lock_); + bool TryBuildAndInline(ArtMethod* resolved_method, HInvoke* invoke_instruction, - bool same_dex_file); + bool same_dex_file, + bool do_rtp = true); + HGraph* const outermost_graph_; const DexCompilationUnit& outer_compilation_unit_; const DexCompilationUnit& caller_compilation_unit_; CodeGenerator* const codegen_; diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc index d2017da221..5329b5c1b7 100644 --- a/compiler/optimizing/intrinsics_arm.cc +++ b/compiler/optimizing/intrinsics_arm.cc @@ -113,10 +113,10 @@ void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) { @@ -127,10 +127,10 @@ void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), 
false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -358,7 +358,7 @@ void IntrinsicLocationsBuilderARM::VisitIntegerRotateRight(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitIntegerRotateRight(HInvoke* invoke) { - GenIntegerRotate(invoke->GetLocations(), GetAssembler(), false /* is_left */); + GenIntegerRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ false); } void IntrinsicLocationsBuilderARM::VisitLongRotateRight(HInvoke* invoke) { @@ -377,7 +377,7 @@ void IntrinsicLocationsBuilderARM::VisitLongRotateRight(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitLongRotateRight(HInvoke* invoke) { - GenLongRotate(invoke->GetLocations(), GetAssembler(), false /* is_left */); + GenLongRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ false); } void IntrinsicLocationsBuilderARM::VisitIntegerRotateLeft(HInvoke* invoke) { @@ -390,7 +390,7 @@ void IntrinsicLocationsBuilderARM::VisitIntegerRotateLeft(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitIntegerRotateLeft(HInvoke* invoke) { - GenIntegerRotate(invoke->GetLocations(), GetAssembler(), true /* is_left */); + GenIntegerRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ true); } void IntrinsicLocationsBuilderARM::VisitLongRotateLeft(HInvoke* invoke) { @@ -409,7 +409,7 @@ void IntrinsicLocationsBuilderARM::VisitLongRotateLeft(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitLongRotateLeft(HInvoke* invoke) { - GenLongRotate(invoke->GetLocations(), GetAssembler(), true /* is_left */); + GenLongRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ true); } static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { @@ -429,7 +429,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) { - 
MathAbsFP(invoke->GetLocations(), true, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) { @@ -437,7 +437,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) { @@ -486,7 +486,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), false, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } @@ -495,7 +495,7 @@ void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), true, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenMinMax(LocationSummary* locations, @@ -526,7 +526,7 @@ void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler()); } void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) { @@ -534,7 +534,7 @@ void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler()); } void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) { @@ -742,22 +742,22 @@ 
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) } void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, @@ -787,31 +787,34 @@ static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, } void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, 
Primitive::kPrimInt, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke); + CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke); + CreateIntIntIntIntToVoid( + arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke); + CreateIntIntIntIntToVoid( + arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke); } void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke); + CreateIntIntIntIntToVoid( + arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke); } static void GenUnsafePut(LocationSummary* locations, @@ -873,31 +876,67 @@ 
static void GenUnsafePut(LocationSummary* locations, } void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, 
+ /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, @@ -1245,7 +1284,8 @@ void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); + GenerateVisitStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) { @@ -1265,7 +1305,8 @@ void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); + GenerateVisitStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) { @@ -1644,7 +1685,7 @@ void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) { temp2, dest, Register(kNoRegister), - false); + /* can_be_null */ false); __ Bind(slow_path->GetExitLabel()); } diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index b04dcceb05..962c4d5167 100644 --- 
a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -202,10 +202,10 @@ void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke } void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { @@ -216,10 +216,10 @@ void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -477,7 +477,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) { @@ -485,7 +485,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler()); + 
MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -514,7 +514,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) { @@ -522,7 +522,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); } static void GenMinMaxFP(LocationSummary* locations, @@ -557,7 +557,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) { @@ -565,7 +565,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) { @@ -573,7 +573,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, true, 
GetVIXLAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) { @@ -581,7 +581,8 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler()); + GenMinMaxFP( + invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler()); } static void GenMinMax(LocationSummary* locations, @@ -614,7 +615,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) { @@ -622,7 +623,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) { @@ -630,7 +631,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) { @@ -638,7 +639,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) { } void 
IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) { @@ -714,7 +715,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) { - GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler()); + GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) { @@ -722,7 +723,7 @@ void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) { - GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler()); + GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler()); } void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) { @@ -895,22 +896,22 @@ void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invok } void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, 
codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) { @@ -1001,31 +1002,67 @@ static void GenUnsafePut(LocationSummary* locations, } void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, 
codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -1379,7 +1416,8 @@ void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true); + GenerateVisitStringIndexOf( + invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) { @@ -1399,7 +1437,8 @@ void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) { } void 
IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false); + GenerateVisitStringIndexOf( + invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) { diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc index 326844526e..9ecce0e93a 100644 --- a/compiler/optimizing/intrinsics_mips.cc +++ b/compiler/optimizing/intrinsics_mips.cc @@ -43,6 +43,14 @@ ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() { return codegen_->GetGraph()->GetArena(); } +inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() { + return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(); +} + +inline bool IntrinsicCodeGeneratorMIPS::IsR6() { + return codegen_->GetInstructionSetFeatures().IsR6(); +} + #define __ codegen->GetAssembler()-> static void MoveFromReturnRegister(Location trg, @@ -168,7 +176,7 @@ void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invo } void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // int java.lang.Float.floatToRawIntBits(float) @@ -177,7 +185,7 @@ void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -210,7 +218,7 @@ void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) { 
- MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // float java.lang.Float.intBitsToFloat(int) @@ -219,24 +227,29 @@ void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } -static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { +static void CreateIntToIntLocations(ArenaAllocator* arena, + HInvoke* invoke, + Location::OutputOverlap overlaps = Location::kNoOutputOverlap) { LocationSummary* locations = new (arena) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); locations->SetInAt(0, Location::RequiresRegister()); - locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); + locations->SetOut(Location::RequiresRegister(), overlaps); } -static void GenReverseBytes(LocationSummary* locations, - Primitive::Type type, - MipsAssembler* assembler, - bool isR2OrNewer) { +static void GenReverse(LocationSummary* locations, + Primitive::Type type, + bool isR2OrNewer, + bool isR6, + bool reverseBits, + MipsAssembler* assembler) { DCHECK(type == Primitive::kPrimShort || type == Primitive::kPrimInt || type == Primitive::kPrimLong); + DCHECK(type != Primitive::kPrimShort || !reverseBits); if (type == Primitive::kPrimShort) { Register in = locations->InAt(0).AsRegister<Register>(); @@ -273,6 +286,30 @@ static void GenReverseBytes(LocationSummary* locations, __ And(out, out, AT); __ Or(out, out, TMP); } + if (reverseBits) { + if (isR6) { + __ Bitswap(out, out); + } else { + __ LoadConst32(AT, 0x0F0F0F0F); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out, out, 4); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x33333333); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 2); 
+ __ Srl(out, out, 2); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x55555555); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out, out, 1); + __ And(out, out, AT); + __ Or(out, TMP, out); + } + } } else if (type == Primitive::kPrimLong) { Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); @@ -314,6 +351,46 @@ static void GenReverseBytes(LocationSummary* locations, __ And(out_lo, out_lo, AT); __ Or(out_lo, out_lo, TMP); } + if (reverseBits) { + if (isR6) { + __ Bitswap(out_hi, out_hi); + __ Bitswap(out_lo, out_lo); + } else { + __ LoadConst32(AT, 0x0F0F0F0F); + __ And(TMP, out_hi, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out_hi, out_hi, 4); + __ And(out_hi, out_hi, AT); + __ Or(out_hi, TMP, out_hi); + __ And(TMP, out_lo, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out_lo, out_lo, 4); + __ And(out_lo, out_lo, AT); + __ Or(out_lo, TMP, out_lo); + __ LoadConst32(AT, 0x33333333); + __ And(TMP, out_hi, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out_hi, out_hi, 2); + __ And(out_hi, out_hi, AT); + __ Or(out_hi, TMP, out_hi); + __ And(TMP, out_lo, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out_lo, out_lo, 2); + __ And(out_lo, out_lo, AT); + __ Or(out_lo, TMP, out_lo); + __ LoadConst32(AT, 0x55555555); + __ And(TMP, out_hi, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out_hi, out_hi, 1); + __ And(out_hi, out_hi, AT); + __ Or(out_hi, TMP, out_hi); + __ And(TMP, out_lo, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out_lo, out_lo, 1); + __ And(out_lo, out_lo, AT); + __ Or(out_lo, TMP, out_lo); + } + } } } @@ -323,10 +400,12 @@ void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) { - GenReverseBytes(invoke->GetLocations(), - Primitive::kPrimInt, - GetAssembler(), - codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2()); + GenReverse(invoke->GetLocations(), + Primitive::kPrimInt, + 
IsR2OrNewer(), + IsR6(), + false, + GetAssembler()); } // long java.lang.Long.reverseBytes(long) @@ -335,10 +414,12 @@ void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) { - GenReverseBytes(invoke->GetLocations(), - Primitive::kPrimLong, - GetAssembler(), - codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2()); + GenReverse(invoke->GetLocations(), + Primitive::kPrimLong, + IsR2OrNewer(), + IsR6(), + false, + GetAssembler()); } // short java.lang.Short.reverseBytes(short) @@ -347,10 +428,397 @@ void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) { - GenReverseBytes(invoke->GetLocations(), - Primitive::kPrimShort, - GetAssembler(), - codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2()); + GenReverse(invoke->GetLocations(), + Primitive::kPrimShort, + IsR2OrNewer(), + IsR6(), + false, + GetAssembler()); +} + +static void GenNumberOfLeadingZeroes(LocationSummary* locations, + bool is64bit, + bool isR6, + MipsAssembler* assembler) { + Register out = locations->Out().AsRegister<Register>(); + if (is64bit) { + Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + + if (isR6) { + __ ClzR6(AT, in_hi); + __ ClzR6(TMP, in_lo); + __ Seleqz(TMP, TMP, in_hi); + } else { + __ ClzR2(AT, in_hi); + __ ClzR2(TMP, in_lo); + __ Movn(TMP, ZERO, in_hi); + } + __ Addu(out, AT, TMP); + } else { + Register in = locations->InAt(0).AsRegister<Register>(); + + if (isR6) { + __ ClzR6(out, in); + } else { + __ ClzR2(out, in); + } + } +} + +// int java.lang.Integer.numberOfLeadingZeros(int i) +void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void 
IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { + GenNumberOfLeadingZeroes(invoke->GetLocations(), false, IsR6(), GetAssembler()); +} + +// int java.lang.Long.numberOfLeadingZeros(long i) +void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { + GenNumberOfLeadingZeroes(invoke->GetLocations(), true, IsR6(), GetAssembler()); +} + +static void GenNumberOfTrailingZeroes(LocationSummary* locations, + bool is64bit, + bool isR6, + bool isR2OrNewer, + MipsAssembler* assembler) { + Register out = locations->Out().AsRegister<Register>(); + Register in_lo; + Register in; + + if (is64bit) { + MipsLabel done; + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + + in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + + // If in_lo is zero then count the number of trailing zeroes in in_hi; + // otherwise count the number of trailing zeroes in in_lo. + // AT = in_lo ? in_lo : in_hi; + if (isR6) { + __ Seleqz(out, in_hi, in_lo); + __ Selnez(TMP, in_lo, in_lo); + __ Or(out, out, TMP); + } else { + __ Movz(out, in_hi, in_lo); + __ Movn(out, in_lo, in_lo); + } + + in = out; + } else { + in = locations->InAt(0).AsRegister<Register>(); + // Give in_lo a dummy value to keep the compiler from complaining. + // Since we only get here in the 32-bit case, this value will never + // be used. + in_lo = in; + } + + // We don't have an instruction to count the number of trailing zeroes. + // Start by flipping the bits end-for-end so we can count the number of + // leading zeroes instead. 
+ if (isR2OrNewer) { + __ Rotr(out, in, 16); + __ Wsbh(out, out); + } else { + // MIPS32r1 + // __ Rotr(out, in, 16); + __ Sll(TMP, in, 16); + __ Srl(out, in, 16); + __ Or(out, out, TMP); + // __ Wsbh(out, out); + __ LoadConst32(AT, 0x00FF00FF); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 8); + __ Srl(out, out, 8); + __ And(out, out, AT); + __ Or(out, out, TMP); + } + + if (isR6) { + __ Bitswap(out, out); + __ ClzR6(out, out); + } else { + __ LoadConst32(AT, 0x0F0F0F0F); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 4); + __ Srl(out, out, 4); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x33333333); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 2); + __ Srl(out, out, 2); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ LoadConst32(AT, 0x55555555); + __ And(TMP, out, AT); + __ Sll(TMP, TMP, 1); + __ Srl(out, out, 1); + __ And(out, out, AT); + __ Or(out, TMP, out); + __ ClzR2(out, out); + } + + if (is64bit) { + // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the + // number of trailing zeroes in in_lo (32) to get the correct final count + __ LoadConst32(TMP, 32); + if (isR6) { + __ Seleqz(TMP, TMP, in_lo); + } else { + __ Movn(TMP, ZERO, in_lo); + } + __ Addu(out, out, TMP); + } +} + +// int java.lang.Integer.numberOfTrailingZeros(int i) +void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { + GenNumberOfTrailingZeroes(invoke->GetLocations(), false, IsR6(), IsR2OrNewer(), GetAssembler()); +} + +// int java.lang.Long.numberOfTrailingZeros(long i) +void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { + 
GenNumberOfTrailingZeroes(invoke->GetLocations(), true, IsR6(), IsR2OrNewer(), GetAssembler()); +} + +enum RotationDirection { + kRotateRight, + kRotateLeft, +}; + +static void GenRotate(HInvoke* invoke, + Primitive::Type type, + bool isR2OrNewer, + RotationDirection direction, + MipsAssembler* assembler) { + DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong); + + LocationSummary* locations = invoke->GetLocations(); + if (invoke->InputAt(1)->IsIntConstant()) { + int32_t shift = static_cast<int32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()); + if (type == Primitive::kPrimInt) { + Register in = locations->InAt(0).AsRegister<Register>(); + Register out = locations->Out().AsRegister<Register>(); + + shift &= 0x1f; + if (direction == kRotateLeft) { + shift = (32 - shift) & 0x1F; + } + + if (isR2OrNewer) { + if ((shift != 0) || (out != in)) { + __ Rotr(out, in, shift); + } + } else { + if (shift == 0) { + if (out != in) { + __ Move(out, in); + } + } else { + __ Srl(AT, in, shift); + __ Sll(out, in, 32 - shift); + __ Or(out, out, AT); + } + } + } else { // Primitive::kPrimLong + Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + Register out_lo = locations->Out().AsRegisterPairLow<Register>(); + Register out_hi = locations->Out().AsRegisterPairHigh<Register>(); + + shift &= 0x3f; + if (direction == kRotateLeft) { + shift = (64 - shift) & 0x3F; + } + + if (shift == 0) { + __ Move(out_lo, in_lo); + __ Move(out_hi, in_hi); + } else if (shift == 32) { + __ Move(out_lo, in_hi); + __ Move(out_hi, in_lo); + } else if (shift < 32) { + __ Srl(AT, in_lo, shift); + __ Sll(out_lo, in_hi, 32 - shift); + __ Or(out_lo, out_lo, AT); + __ Srl(AT, in_hi, shift); + __ Sll(out_hi, in_lo, 32 - shift); + __ Or(out_hi, out_hi, AT); + } else { + __ Sll(AT, in_lo, 64 - shift); + __ Srl(out_lo, in_hi, shift - 32); + __ Or(out_lo, out_lo, AT); + __ Sll(AT, in_hi, 64 - shift); + __ 
Srl(out_hi, in_lo, shift - 32); + __ Or(out_hi, out_hi, AT); + } + } + } else { // !invoke->InputAt(1)->IsIntConstant() + Register shamt = locations->InAt(1).AsRegister<Register>(); + if (type == Primitive::kPrimInt) { + Register in = locations->InAt(0).AsRegister<Register>(); + Register out = locations->Out().AsRegister<Register>(); + + if (isR2OrNewer) { + if (direction == kRotateRight) { + __ Rotrv(out, in, shamt); + } else { + // negu tmp, shamt + __ Subu(TMP, ZERO, shamt); + __ Rotrv(out, in, TMP); + } + } else { + if (direction == kRotateRight) { + __ Srlv(AT, in, shamt); + __ Subu(TMP, ZERO, shamt); + __ Sllv(out, in, TMP); + __ Or(out, out, AT); + } else { + __ Sllv(AT, in, shamt); + __ Subu(TMP, ZERO, shamt); + __ Srlv(out, in, TMP); + __ Or(out, out, AT); + } + } + } else { // Primitive::kPrimLong + Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>(); + Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>(); + Register out_lo = locations->Out().AsRegisterPairLow<Register>(); + Register out_hi = locations->Out().AsRegisterPairHigh<Register>(); + + MipsLabel done; + + if (direction == kRotateRight) { + __ Nor(TMP, ZERO, shamt); + __ Srlv(AT, in_lo, shamt); + __ Sll(out_lo, in_hi, 1); + __ Sllv(out_lo, out_lo, TMP); + __ Or(out_lo, out_lo, AT); + __ Srlv(AT, in_hi, shamt); + __ Sll(out_hi, in_lo, 1); + __ Sllv(out_hi, out_hi, TMP); + __ Or(out_hi, out_hi, AT); + } else { + __ Nor(TMP, ZERO, shamt); + __ Sllv(AT, in_lo, shamt); + __ Srl(out_lo, in_hi, 1); + __ Srlv(out_lo, out_lo, TMP); + __ Or(out_lo, out_lo, AT); + __ Sllv(AT, in_hi, shamt); + __ Srl(out_hi, in_lo, 1); + __ Srlv(out_hi, out_hi, TMP); + __ Or(out_hi, out_hi, AT); + } + + __ Andi(TMP, shamt, 32); + __ Beqz(TMP, &done); + __ Move(TMP, out_hi); + __ Move(out_hi, out_lo); + __ Move(out_lo, TMP); + + __ Bind(&done); + } + } +} + +// int java.lang.Integer.rotateRight(int i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateRight(HInvoke* invoke) { + 
LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateRight(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateRight, GetAssembler()); +} + +// long java.lang.Long.rotateRight(long i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitLongRotateRight(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongRotateRight(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateRight, GetAssembler()); +} + +// int java.lang.Integer.rotateLeft(int i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitIntegerRotateLeft(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerRotateLeft(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimInt, IsR2OrNewer(), kRotateLeft, GetAssembler()); +} + +// long java.lang.Long.rotateLeft(long i, int distance) +void IntrinsicLocationsBuilderMIPS::VisitLongRotateLeft(HInvoke* invoke) { + LocationSummary* locations = new (arena_) LocationSummary(invoke, + LocationSummary::kNoCall, + 
kIntrinsified); + locations->SetInAt(0, Location::RequiresRegister()); + locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); + locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongRotateLeft(HInvoke* invoke) { + GenRotate(invoke, Primitive::kPrimLong, IsR2OrNewer(), kRotateLeft, GetAssembler()); +} + +// int java.lang.Integer.reverse(int) +void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) { + GenReverse(invoke->GetLocations(), + Primitive::kPrimInt, + IsR2OrNewer(), + IsR6(), + true, + GetAssembler()); +} + +// long java.lang.Long.reverse(long) +void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) { + CreateIntToIntLocations(arena_, invoke); +} + +void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) { + GenReverse(invoke->GetLocations(), + Primitive::kPrimLong, + IsR2OrNewer(), + IsR6(), + true, + GetAssembler()); } // boolean java.lang.String.equals(Object anObject) @@ -463,10 +931,6 @@ void IntrinsicLocationsBuilderMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUS void IntrinsicCodeGeneratorMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \ } -UNIMPLEMENTED_INTRINSIC(IntegerReverse) -UNIMPLEMENTED_INTRINSIC(LongReverse) -UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros) -UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros) UNIMPLEMENTED_INTRINSIC(MathAbsDouble) UNIMPLEMENTED_INTRINSIC(MathAbsFloat) UNIMPLEMENTED_INTRINSIC(MathAbsInt) @@ -519,12 +983,6 @@ UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter) UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes) UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars) UNIMPLEMENTED_INTRINSIC(StringNewStringFromString) -UNIMPLEMENTED_INTRINSIC(LongRotateLeft) -UNIMPLEMENTED_INTRINSIC(LongRotateRight) -UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros) 
-UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft) -UNIMPLEMENTED_INTRINSIC(IntegerRotateRight) -UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros) UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent) UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck) diff --git a/compiler/optimizing/intrinsics_mips.h b/compiler/optimizing/intrinsics_mips.h index c71b3c68b7..19ad5255d5 100644 --- a/compiler/optimizing/intrinsics_mips.h +++ b/compiler/optimizing/intrinsics_mips.h @@ -67,6 +67,9 @@ INTRINSICS_LIST(OPTIMIZING_INTRINSICS) #undef INTRINSICS_LIST #undef OPTIMIZING_INTRINSICS + bool IsR2OrNewer(void); + bool IsR6(void); + private: MipsAssembler* GetAssembler(); diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc index ecee11dea6..36e1b20e4e 100644 --- a/compiler/optimizing/intrinsics_mips64.cc +++ b/compiler/optimizing/intrinsics_mips64.cc @@ -162,7 +162,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* in } void IntrinsicCodeGeneratorMIPS64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // int java.lang.Float.floatToRawIntBits(float) @@ -171,7 +171,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invok } void IntrinsicCodeGeneratorMIPS64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -199,7 +199,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invok } void IntrinsicCodeGeneratorMIPS64::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // float 
java.lang.Float.intBitsToFloat(int) @@ -208,7 +208,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -290,7 +290,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* } void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { - GenNumberOfLeadingZeroes(invoke->GetLocations(), false, GetAssembler()); + GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } // int java.lang.Long.numberOfLeadingZeros(long i) @@ -299,7 +299,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* inv } void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { - GenNumberOfLeadingZeroes(invoke->GetLocations(), true, GetAssembler()); + GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenNumberOfTrailingZeroes(LocationSummary* locations, @@ -327,7 +327,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* } void IntrinsicCodeGeneratorMIPS64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { - GenNumberOfTrailingZeroes(invoke->GetLocations(), false, GetAssembler()); + GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } // int java.lang.Long.numberOfTrailingZeros(long i) @@ -336,7 +336,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* in } void IntrinsicCodeGeneratorMIPS64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { - GenNumberOfTrailingZeroes(invoke->GetLocations(), true, GetAssembler()); + GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ 
true, GetAssembler()); } static void GenRotateRight(HInvoke* invoke, @@ -525,7 +525,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), true, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } // float java.lang.Math.abs(float) @@ -534,7 +534,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -566,7 +566,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsInt(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), false, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } // long java.lang.Math.abs(long) @@ -575,7 +575,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathAbsLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathAbsLong(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), true, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenMinMaxFP(LocationSummary* locations, @@ -616,7 +616,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitMathMinDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler()); } // float java.lang.Math.min(float, float) @@ -625,7 +625,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) { } void 
IntrinsicCodeGeneratorMIPS64::VisitMathMinFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler()); } // double java.lang.Math.max(double, double) @@ -634,7 +634,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler()); } // float java.lang.Math.max(float, float) @@ -643,7 +643,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler()); } static void GenMinMax(LocationSummary* locations, @@ -713,7 +713,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler()); } // long java.lang.Math.min(long, long) @@ -722,7 +722,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMinLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMinLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler()); } // int java.lang.Math.max(int, int) @@ -731,7 +731,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, GetAssembler()); + 
GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler()); } // long java.lang.Math.max(long, long) @@ -740,7 +740,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitMathMaxLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitMathMaxLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler()); } // double java.lang.Math.sqrt(double) @@ -1045,7 +1045,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGet(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } // int sun.misc.Unsafe.getIntVolatile(Object o, long offset) @@ -1054,7 +1054,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } // long sun.misc.Unsafe.getLong(Object o, long offset) @@ -1063,7 +1063,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } // long sun.misc.Unsafe.getLongVolatile(Object o, long offset) @@ -1072,7 +1072,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } // Object sun.misc.Unsafe.getObject(Object o, long offset) @@ -1081,7 +1081,7 
@@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObject(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } // Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset) @@ -1090,7 +1090,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invo } void IntrinsicCodeGeneratorMIPS64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) { @@ -1151,7 +1151,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePut(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x) @@ -1160,7 +1164,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimInt, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } // void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x) @@ -1169,7 +1177,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + 
Primitive::kPrimInt, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putObject(Object o, long offset, Object x) @@ -1178,7 +1190,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObject(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x) @@ -1187,7 +1203,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invok } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } // void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x) @@ -1196,7 +1216,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invo } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimNot, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putLong(Object o, long offset, long x) @@ -1205,7 +1229,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ false, + codegen_); } // void sun.misc.Unsafe.putOrderedLong(Object 
o, long offset, long x) @@ -1214,7 +1242,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ false, + /* is_ordered */ true, + codegen_); } // void sun.misc.Unsafe.putLongVolatile(Object o, long offset, long x) @@ -1223,7 +1255,11 @@ void IntrinsicLocationsBuilderMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke } void IntrinsicCodeGeneratorMIPS64::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_); + GenUnsafePut(invoke->GetLocations(), + Primitive::kPrimLong, + /* is_volatile */ true, + /* is_ordered */ false, + codegen_); } static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) { @@ -1565,7 +1601,7 @@ void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOf(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); + GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } // int java.lang.String.indexOf(int ch, int fromIndex) @@ -1584,7 +1620,8 @@ void IntrinsicLocationsBuilderMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); + GenerateStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } // java.lang.String.String(byte[] bytes) diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index 371588fc47..5b67cdefa3 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ 
b/compiler/optimizing/intrinsics_x86.cc @@ -138,31 +138,31 @@ static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* } void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - CreateFPToIntLocations(arena_, invoke, true); + CreateFPToIntLocations(arena_, invoke, /* is64bit */ true); } void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - CreateIntToFPLocations(arena_, invoke, true); + CreateIntToFPLocations(arena_, invoke, /* is64bit */ true); } void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - CreateFPToIntLocations(arena_, invoke, false); + CreateFPToIntLocations(arena_, invoke, /* is64bit */ false); } void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) { - CreateIntToFPLocations(arena_, invoke, false); + CreateIntToFPLocations(arena_, invoke, /* is64bit */ false); } void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -298,7 +298,7 @@ void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) { } void 
IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), true, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) { @@ -306,7 +306,7 @@ void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetAssembler()); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) { @@ -490,7 +490,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) { @@ -498,7 +498,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) { @@ -506,7 +506,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) { @@ -514,7 +514,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) 
{ } void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler()); + GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler()); } static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long, @@ -597,7 +597,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) { @@ -605,7 +605,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) { @@ -613,7 +613,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler()); } void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) { @@ -621,7 +621,7 @@ void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler()); } static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -1265,19 +1265,20 @@ static void 
GenerateStringIndexOf(HInvoke* invoke, } void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) { - CreateStringIndexOfLocations(invoke, arena_, true); + CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true); } void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); + GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) { - CreateStringIndexOfLocations(invoke, arena_, false); + CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false); } void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); + GenerateStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) { @@ -1660,42 +1661,42 @@ static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke } void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) { - CreateIntIntIntToIntLocations(arena_, invoke, false, false); + CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) { - CreateIntIntIntToIntLocations(arena_, invoke, false, true); + CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true); } void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) { - CreateIntIntIntToIntLocations(arena_, invoke, false, false); + CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - CreateIntIntIntToIntLocations(arena_, invoke, true, 
true); + CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ true); } void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) { - CreateIntIntIntToIntLocations(arena_, invoke, false, false); + CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - CreateIntIntIntToIntLocations(arena_, invoke, false, true); + CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true); } void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } @@ -1722,31 +1723,40 @@ static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena, } void 
IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, true); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true); } void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, true); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true); } void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false); } void 
IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false); } void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) { - CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, true); + CreateIntIntIntIntToVoidPlusTempsLocations( + arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true); } // We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86 @@ -1798,31 +1808,31 @@ static void GenUnsafePut(LocationSummary* locations, } void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void 
IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type, diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc index 2d9f01b821..ecd129f31e 100644 --- a/compiler/optimizing/intrinsics_x86_64.cc +++ b/compiler/optimizing/intrinsics_x86_64.cc @@ -115,10 +115,10 @@ void IntrinsicLocationsBuilderX86_64::VisitDoubleLongBitsToDouble(HInvoke* invok } void IntrinsicCodeGeneratorX86_64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), true, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicCodeGeneratorX86_64::VisitDoubleLongBitsToDouble(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), true, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86_64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { @@ -129,10 +129,10 @@ void 
IntrinsicLocationsBuilderX86_64::VisitFloatIntBitsToFloat(HInvoke* invoke) } void IntrinsicCodeGeneratorX86_64::VisitFloatFloatToRawIntBits(HInvoke* invoke) { - MoveFPToInt(invoke->GetLocations(), false, GetAssembler()); + MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } void IntrinsicCodeGeneratorX86_64::VisitFloatIntBitsToFloat(HInvoke* invoke) { - MoveIntToFP(invoke->GetLocations(), false, GetAssembler()); + MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -230,7 +230,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsDouble(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathAbsDouble(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), true, GetAssembler(), codegen_); + MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler(), codegen_); } void IntrinsicLocationsBuilderX86_64::VisitMathAbsFloat(HInvoke* invoke) { @@ -238,7 +238,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathAbsFloat(HInvoke* invoke) { - MathAbsFP(invoke->GetLocations(), false, GetAssembler(), codegen_); + MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler(), codegen_); } static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) { @@ -277,7 +277,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathAbsInt(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), false, GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); } void IntrinsicLocationsBuilderX86_64::VisitMathAbsLong(HInvoke* invoke) { @@ -285,7 +285,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathAbsLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathAbsLong(HInvoke* invoke) { - GenAbsInteger(invoke->GetLocations(), true, 
GetAssembler()); + GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); } static void GenMinMaxFP(LocationSummary* locations, @@ -388,7 +388,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinDoubleDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorX86_64::VisitMathMinDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler(), codegen_); + GenMinMaxFP( + invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler(), codegen_); } void IntrinsicLocationsBuilderX86_64::VisitMathMinFloatFloat(HInvoke* invoke) { @@ -396,7 +397,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathMinFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler(), codegen_); + GenMinMaxFP( + invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler(), codegen_); } void IntrinsicLocationsBuilderX86_64::VisitMathMaxDoubleDouble(HInvoke* invoke) { @@ -404,7 +406,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxDoubleDouble(HInvoke* invoke) } void IntrinsicCodeGeneratorX86_64::VisitMathMaxDoubleDouble(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler(), codegen_); + GenMinMaxFP( + invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler(), codegen_); } void IntrinsicLocationsBuilderX86_64::VisitMathMaxFloatFloat(HInvoke* invoke) { @@ -412,7 +415,8 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxFloatFloat(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathMaxFloatFloat(HInvoke* invoke) { - GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler(), codegen_); + GenMinMaxFP( + invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler(), codegen_); } static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long, @@ -461,7 +465,7 @@ void 
IntrinsicLocationsBuilderX86_64::VisitMathMinIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathMinIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler()); } void IntrinsicLocationsBuilderX86_64::VisitMathMinLongLong(HInvoke* invoke) { @@ -469,7 +473,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMinLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathMinLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), true, true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler()); } void IntrinsicLocationsBuilderX86_64::VisitMathMaxIntInt(HInvoke* invoke) { @@ -477,7 +481,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxIntInt(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathMaxIntInt(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, false, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler()); } void IntrinsicLocationsBuilderX86_64::VisitMathMaxLongLong(HInvoke* invoke) { @@ -485,7 +489,7 @@ void IntrinsicLocationsBuilderX86_64::VisitMathMaxLongLong(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitMathMaxLongLong(HInvoke* invoke) { - GenMinMax(invoke->GetLocations(), false, true, GetAssembler()); + GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler()); } static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) { @@ -690,7 +694,7 @@ void IntrinsicCodeGeneratorX86_64::VisitMathRoundDouble(HInvoke* invoke) { __ j(kUnordered, &nan); // output = double-to-long-truncate(input) - __ cvttsd2si(out, inPlusPointFive, true); + __ cvttsd2si(out, inPlusPointFive, /* is64bit */ true); __ jmp(&done); __ Bind(&nan); @@ -1152,7 +1156,7 @@ void IntrinsicCodeGeneratorX86_64::VisitSystemArrayCopy(HInvoke* 
invoke) { temp2, dest, CpuRegister(kNoRegister), - false); + /* value_can_be_null */ false); __ Bind(slow_path->GetExitLabel()); } @@ -1180,8 +1184,8 @@ void IntrinsicCodeGeneratorX86_64::VisitStringCompareTo(HInvoke* invoke) { codegen_->AddSlowPath(slow_path); __ j(kEqual, slow_path->GetEntryLabel()); - __ gs()->call(Address::Absolute( - QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pStringCompareTo), true)); + __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pStringCompareTo), + /* no_rip */ true)); __ Bind(slow_path->GetExitLabel()); } @@ -1372,7 +1376,7 @@ static void GenerateStringIndexOf(HInvoke* invoke, // Ensure we have a start index >= 0; __ xorl(counter, counter); __ cmpl(start_index, Immediate(0)); - __ cmov(kGreater, counter, start_index, false); // 32-bit copy is enough. + __ cmov(kGreater, counter, start_index, /* is64bit */ false); // 32-bit copy is enough. // Move to the start of the string: string_obj + value_offset + 2 * start_index. __ leaq(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset)); @@ -1409,19 +1413,20 @@ static void GenerateStringIndexOf(HInvoke* invoke, } void IntrinsicLocationsBuilderX86_64::VisitStringIndexOf(HInvoke* invoke) { - CreateStringIndexOfLocations(invoke, arena_, true); + CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true); } void IntrinsicCodeGeneratorX86_64::VisitStringIndexOf(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true); + GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); } void IntrinsicLocationsBuilderX86_64::VisitStringIndexOfAfter(HInvoke* invoke) { - CreateStringIndexOfLocations(invoke, arena_, false); + CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false); } void IntrinsicCodeGeneratorX86_64::VisitStringIndexOfAfter(HInvoke* invoke) { - GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false); + 
GenerateStringIndexOf( + invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); } void IntrinsicLocationsBuilderX86_64::VisitStringNewStringFromBytes(HInvoke* invoke) { @@ -1446,8 +1451,8 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromBytes(HInvoke* invoke codegen_->AddSlowPath(slow_path); __ j(kEqual, slow_path->GetEntryLabel()); - __ gs()->call(Address::Absolute( - QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromBytes), true)); + __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromBytes), + /* no_rip */ true)); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); __ Bind(slow_path->GetExitLabel()); } @@ -1466,8 +1471,8 @@ void IntrinsicLocationsBuilderX86_64::VisitStringNewStringFromChars(HInvoke* inv void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromChars(HInvoke* invoke) { X86_64Assembler* assembler = GetAssembler(); - __ gs()->call(Address::Absolute( - QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromChars), true)); + __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromChars), + /* no_rip */ true)); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); } @@ -1490,8 +1495,8 @@ void IntrinsicCodeGeneratorX86_64::VisitStringNewStringFromString(HInvoke* invok codegen_->AddSlowPath(slow_path); __ j(kEqual, slow_path->GetEntryLabel()); - __ gs()->call(Address::Absolute( - QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromString), true)); + __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocStringFromString), + /* no_rip */ true)); codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); __ Bind(slow_path->GetExitLabel()); } @@ -1715,7 +1720,8 @@ void IntrinsicLocationsBuilderX86_64::VisitThreadCurrentThread(HInvoke* invoke) void IntrinsicCodeGeneratorX86_64::VisitThreadCurrentThread(HInvoke* invoke) { CpuRegister out = invoke->GetLocations()->Out().AsRegister<CpuRegister>(); - 
GetAssembler()->gs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86_64WordSize>(), true)); + GetAssembler()->gs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86_64WordSize>(), + /* no_rip */ true)); } static void GenUnsafeGet(HInvoke* invoke, @@ -1786,22 +1792,22 @@ void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetObjectVolatile(HInvoke* invo void IntrinsicCodeGeneratorX86_64::VisitUnsafeGet(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetLong(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetObject(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { - GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_); + GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); } @@ -1885,31 +1891,31 @@ static void GenUnsafePut(LocationSummary* locations, Primitive::Type type, bool } void IntrinsicCodeGeneratorX86_64::VisitUnsafePut(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* 
is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutObject(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutLong(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutLongOrdered(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_); + GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_); } void IntrinsicCodeGeneratorX86_64::VisitUnsafePutLongVolatile(HInvoke* invoke) { - GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_); + 
GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_); } static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type, diff --git a/compiler/optimizing/licm.cc b/compiler/optimizing/licm.cc index c38bbe3477..02befc011a 100644 --- a/compiler/optimizing/licm.cc +++ b/compiler/optimizing/licm.cc @@ -121,6 +121,8 @@ void LICM::Run() { // phi in it. if (instruction->NeedsEnvironment()) { UpdateLoopPhisIn(instruction->GetEnvironment(), loop_info); + } else { + DCHECK(!instruction->HasEnvironment()); } instruction->MoveBefore(pre_header->GetLastInstruction()); } else if (instruction->CanThrow()) { diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc index 5b89cfef5a..680f89f9b9 100644 --- a/compiler/optimizing/load_store_elimination.cc +++ b/compiler/optimizing/load_store_elimination.cc @@ -933,8 +933,9 @@ class LSEVisitor : public HGraphVisitor { }; void LoadStoreElimination::Run() { - if (graph_->IsDebuggable()) { + if (graph_->IsDebuggable() || graph_->HasTryCatch()) { // Debugger may set heap values or trigger deoptimization of callers. + // Try/catch support not implemented yet. // Skip this optimization. return; } diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index 19614f11c6..9d3c88c79e 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -371,6 +371,9 @@ class HGraph : public ArenaObject<kArenaAllocGraph> { bool HasTryCatch() const { return has_try_catch_; } void SetHasTryCatch(bool value) { has_try_catch_ = value; } + ArtMethod* GetArtMethod() const { return art_method_; } + void SetArtMethod(ArtMethod* method) { art_method_ = method; } + // Returns an instruction with the opposite boolean value from 'cond'. // The instruction has been inserted into the graph, either as a constant, or // before cursor. 
@@ -479,6 +482,11 @@ class HGraph : public ArenaObject<kArenaAllocGraph> { HCurrentMethod* cached_current_method_; + // The ArtMethod this graph is for. Note that for AOT, it may be null, + // for example for methods whose declaring class could not be resolved + // (such as when the superclass could not be found). + ArtMethod* art_method_; + friend class SsaBuilder; // For caching constants. friend class SsaLivenessAnalysis; // For the linear order. ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1); @@ -2462,11 +2470,15 @@ class HTryBoundary : public HTemplateInstruction<0> { // Deoptimize to interpreter, upon checking a condition. class HDeoptimize : public HTemplateInstruction<1> { public: - explicit HDeoptimize(HInstruction* cond, uint32_t dex_pc) + HDeoptimize(HInstruction* cond, uint32_t dex_pc) : HTemplateInstruction(SideEffects::None(), dex_pc) { SetRawInputAt(0, cond); } + bool CanBeMoved() const OVERRIDE { return true; } + bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { + return true; + } bool NeedsEnvironment() const OVERRIDE { return true; } bool CanThrow() const OVERRIDE { return true; } diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index cae2d3f01b..4643aed9cc 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -427,7 +427,7 @@ static void MaybeRunInliner(HGraph* graph, return; } HInliner* inliner = new (graph->GetArena()) HInliner( - graph, codegen, dex_compilation_unit, dex_compilation_unit, driver, handles, stats); + graph, graph, codegen, dex_compilation_unit, dex_compilation_unit, driver, handles, stats); HOptimization* optimizations[] = { inliner }; RunOptimizations(optimizations, arraysize(optimizations), pass_observer); @@ -531,7 +531,6 @@ static void RunOptimizations(HGraph* graph, graph, stats, "instruction_simplifier_after_bce"); InstructionSimplifier* simplify4 = new (arena) 
InstructionSimplifier( graph, stats, "instruction_simplifier_before_codegen"); - IntrinsicsRecognizer* intrinsics = new (arena) IntrinsicsRecognizer(graph, driver); HOptimization* optimizations1[] = { @@ -543,49 +542,30 @@ static void RunOptimizations(HGraph* graph, dce1, simplify2 }; - RunOptimizations(optimizations1, arraysize(optimizations1), pass_observer); MaybeRunInliner(graph, codegen, driver, stats, dex_compilation_unit, pass_observer, &handles); - // TODO: Update passes incompatible with try/catch so we have the same - // pipeline for all methods. - if (graph->HasTryCatch()) { - HOptimization* optimizations2[] = { - boolean_simplify, - side_effects, - gvn, - dce2, - // The codegen has a few assumptions that only the instruction simplifier - // can satisfy. For example, the code generator does not expect to see a - // HTypeConversion from a type to the same type. - simplify4, - }; - - RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer); - } else { - HOptimization* optimizations2[] = { - // BooleanSimplifier depends on the InstructionSimplifier removing - // redundant suspend checks to recognize empty blocks. - boolean_simplify, - fold2, // TODO: if we don't inline we can also skip fold2. - side_effects, - gvn, - licm, - induction, - bce, - fold3, // evaluates code generated by dynamic bce - simplify3, - lse, - dce2, - // The codegen has a few assumptions that only the instruction simplifier - // can satisfy. For example, the code generator does not expect to see a - // HTypeConversion from a type to the same type. - simplify4, - }; - - RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer); - } + HOptimization* optimizations2[] = { + // BooleanSimplifier depends on the InstructionSimplifier removing + // redundant suspend checks to recognize empty blocks. + boolean_simplify, + fold2, // TODO: if we don't inline we can also skip fold2. 
+ side_effects, + gvn, + licm, + induction, + bce, + fold3, // evaluates code generated by dynamic bce + simplify3, + lse, + dce2, + // The codegen has a few assumptions that only the instruction simplifier + // can satisfy. For example, the code generator does not expect to see a + // HTypeConversion from a type to the same type. + simplify4, + }; + RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer); RunArchOptimizations(driver->GetInstructionSet(), graph, stats, pass_observer); AllocateRegisters(graph, codegen, pass_observer); @@ -763,8 +743,8 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena, ArtMethod* art_method = compiler_driver->ResolveMethod( soa, dex_cache, loader, &dex_compilation_unit, method_idx, invoke_type); // We may not get a method, for example if its class is erroneous. - // TODO: Clean this up, the compiler driver should just pass the ArtMethod to compile. if (art_method != nullptr) { + graph->SetArtMethod(art_method); interpreter_metadata = art_method->GetQuickenedInfo(); } } @@ -948,6 +928,7 @@ bool OptimizingCompiler::JitCompile(Thread* self, if (stack_map_data == nullptr) { return false; } + MaybeRecordStat(MethodCompilationStat::kCompiled); codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size)); const void* code = code_cache->CommitCode( self, diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h index e5ea0f576b..6296eedfb0 100644 --- a/compiler/optimizing/optimizing_compiler_stats.h +++ b/compiler/optimizing/optimizing_compiler_stats.h @@ -49,6 +49,10 @@ enum MethodCompilationStat { kNotCompiledUnsupportedIsa, kNotCompiledVerificationError, kNotCompiledVerifyAtRuntime, + kInlinedMonomorphicCall, + kMonomorphicCall, + kPolymorphicCall, + kMegamorphicCall, kLastStat }; @@ -111,6 +115,10 @@ class OptimizingCompilerStats { case kNotCompiledUnsupportedIsa : name = "NotCompiledUnsupportedIsa"; break; case 
kNotCompiledVerificationError : name = "NotCompiledVerificationError"; break; case kNotCompiledVerifyAtRuntime : name = "NotCompiledVerifyAtRuntime"; break; + case kInlinedMonomorphicCall: name = "InlinedMonomorphicCall"; break; + case kMonomorphicCall: name = "MonomorphicCall"; break; + case kPolymorphicCall: name = "PolymorphicCall"; break; + case kMegamorphicCall: name = "MegamorphicCall"; break; case kLastStat: LOG(FATAL) << "invalid stat " |