Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/nodes.cc               | 13
-rw-r--r--  compiler/optimizing/nodes.h                |  4
-rw-r--r--  compiler/optimizing/nodes_arm64.h          |  1
-rw-r--r--  compiler/optimizing/register_allocator.cc  |  3
4 files changed, 17 insertions(+), 4 deletions(-)
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 6d4275d8a6..8de9700250 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2146,10 +2146,7 @@ void HInvoke::SetIntrinsic(Intrinsics intrinsic,
                            IntrinsicExceptions exceptions) {
   intrinsic_ = intrinsic;
   IntrinsicOptimizations opt(this);
-  if (needs_env_or_cache == kNoEnvironmentOrCache) {
-    opt.SetDoesNotNeedDexCache();
-    opt.SetDoesNotNeedEnvironment();
-  }
+
   // Adjust method's side effects from intrinsic table.
   switch (side_effects) {
     case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
@@ -2157,6 +2154,14 @@ void HInvoke::SetIntrinsic(Intrinsics intrinsic,
     case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
     case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
   }
+
+  if (needs_env_or_cache == kNoEnvironmentOrCache) {
+    opt.SetDoesNotNeedDexCache();
+    opt.SetDoesNotNeedEnvironment();
+  } else {
+    // If we need an environment, that means there will be a call, which can trigger GC.
+    SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
+  }
   // Adjust method's exception status from intrinsic table.
   switch (exceptions) {
     case kNoThrow: SetCanThrow(false); break;
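
Note (illustration, not part of the patch): the new else branch marks any intrinsic that still needs an environment as able to trigger GC, and it has to sit after the switch, because the switch assigns the side-effect set outright; a bit unioned in before it would be overwritten, which is presumably why the whole block moved below the switch. A minimal stand-alone sketch of that ordering concern, using made-up bitmask flags rather than ART's real SideEffects class:

// Stand-alone illustration only; kAllWrites/kCanTriggerGC are made-up flags,
// not ART's SideEffects encoding.
#include <cassert>
#include <cstdint>

enum : uint32_t { kNone = 0u, kAllWrites = 1u << 0, kCanTriggerGC = 1u << 1 };

int main() {
  uint32_t effects = kNone;

  // If the union ran before the table-driven assignment, the GC bit would be lost.
  effects |= kCanTriggerGC;
  effects = kAllWrites;                     // models SetSideEffects(SideEffects::AllWrites())
  assert((effects & kCanTriggerGC) == 0u);  // GC bit clobbered

  // Order used by the patch: assign from the intrinsic table first, then union.
  effects = kAllWrites;                     // models the switch
  effects |= kCanTriggerGC;                 // models SetSideEffects(GetSideEffects().Union(...))
  assert((effects & kCanTriggerGC) != 0u);  // GC bit preserved
  return 0;
}
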
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index c06d164523..9a7dfd8abf 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -1868,6 +1868,10 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
     return false;
   }
 
+  virtual bool IsActualObject() const {
+    return GetType() == Primitive::kPrimNot;
+  }
+
   void SetReferenceTypeInfo(ReferenceTypeInfo rti);
 
   ReferenceTypeInfo GetReferenceTypeInfo() const {
diff --git a/compiler/optimizing/nodes_arm64.h b/compiler/optimizing/nodes_arm64.h
index 18405f2623..445cdab191 100644
--- a/compiler/optimizing/nodes_arm64.h
+++ b/compiler/optimizing/nodes_arm64.h
@@ -107,6 +107,7 @@ class HArm64IntermediateAddress : public HExpression<2> {
 
   bool CanBeMoved() const OVERRIDE { return true; }
   bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
+  bool IsActualObject() const OVERRIDE { return false; }
 
   HInstruction* GetBaseAddress() const { return InputAt(0); }
   HInstruction* GetOffset() const { return InputAt(1); }
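
Note (illustration, not part of the patch): this override pairs with the new virtual default in nodes.h. The intermediate address is typed as a reference so it can stand in for the object input of array accesses, but its value is an address inside an object, not something the GC may visit; IsActualObject() makes that distinction queryable. A simplified stand-alone sketch of the shape, using stand-in classes rather than ART's real hierarchy:

// Stand-in classes only; the real HInstruction/HArm64IntermediateAddress carry
// far more state. The point is the virtual default plus the targeted override.
#include <iostream>

enum class Primitive { kPrimNot, kPrimInt };

class HInstruction {
 public:
  virtual ~HInstruction() {}
  virtual Primitive GetType() const = 0;
  // Default: a reference-typed instruction produces a real, GC-visible object.
  virtual bool IsActualObject() const { return GetType() == Primitive::kPrimNot; }
};

class HAllocationLike : public HInstruction {            // e.g. an allocation
 public:
  Primitive GetType() const override { return Primitive::kPrimNot; }
};

class HIntermediateAddressLike : public HInstruction {   // models the arm64 node
 public:
  Primitive GetType() const override { return Primitive::kPrimNot; }
  // Reference-typed, but the value is base + data offset, not an object header,
  // so it must never be reported to the GC.
  bool IsActualObject() const override { return false; }
};

int main() {
  HAllocationLike alloc;
  HIntermediateAddressLike addr;
  std::cout << alloc.IsActualObject() << " " << addr.IsActualObject() << "\n";  // prints "1 0"
  return 0;
}
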
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index d399bc2d7a..9a06d9be41 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -1677,6 +1677,7 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
 
       LocationSummary* locations = safepoint_position->GetLocations();
       if ((current->GetType() == Primitive::kPrimNot) && current->GetParent()->HasSpillSlot()) {
+        DCHECK(interval->GetDefinedBy()->IsActualObject()) << interval->GetDefinedBy()->DebugName();
         locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);
       }
 
@@ -1689,6 +1690,8 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
                       maximum_number_of_live_fp_registers_);
           }
           if (current->GetType() == Primitive::kPrimNot) {
+            DCHECK(interval->GetDefinedBy()->IsActualObject())
+                << interval->GetDefinedBy()->DebugName();
             locations->SetRegisterBit(source.reg());
           }
           break;
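
Note (illustration, not part of the patch): both DCHECKs guard the same invariant: when a reference-typed interval is live at a safepoint, whether spilled or in a register, its defining instruction must be an actual object before its location is recorded for the GC; a value like the arm64 intermediate address should never survive across a safepoint in the first place. A simplified stand-alone sketch of that check, with hypothetical types (Instruction, Interval, SafepointMap, RecordAtSafepoint are not ART's real API):

// Hypothetical stand-ins for LiveInterval / LocationSummary; only the shape of
// the assertion mirrors the patch.
#include <cassert>
#include <set>

struct Instruction {
  bool is_reference_typed;
  bool is_actual_object;   // models HInstruction::IsActualObject()
};

struct Interval {
  const Instruction* defined_by;
  int spill_slot;          // stack slot holding the value at this safepoint
};

struct SafepointMap {
  std::set<int> live_reference_slots;  // models LocationSummary stack bits
};

void RecordAtSafepoint(const Interval& interval, SafepointMap* map) {
  if (!interval.defined_by->is_reference_typed) {
    return;  // non-references are never reported to the GC
  }
  // Mirrors: DCHECK(interval->GetDefinedBy()->IsActualObject());
  assert(interval.defined_by->is_actual_object &&
         "only real object references may be visited by the GC");
  map->live_reference_slots.insert(interval.spill_slot);
}

int main() {
  Instruction allocation{/*is_reference_typed=*/true, /*is_actual_object=*/true};
  Interval interval{&allocation, /*spill_slot=*/4};
  SafepointMap map;
  RecordAtSafepoint(interval, &map);
  assert(map.live_reference_slots.count(4) == 1);
  return 0;
}
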