Revert "Revert "Remove non-live vregs from GC map on return.""

This reverts commit 73dda0bc2adcd6a3a7d75f663a3559f8b527d485.
It also fixes the problematic line in the ReferenceMap test.
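
For context: at a return instruction only the returned vreg can still be live, so the verifier now marks every other register as Conflict before the register line feeds the GC map (see the new RegisterLine::MarkAllRegistersAsConflicts* helpers below). A minimal standalone sketch of that marking step, assuming a plain std::vector of type ids and a made-up kConflictId and MarkConflictsAtReturn in place of ART's RegisterLine and RegTypeCache:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Stand-in for the id the verifier's RegTypeCache hands out for Conflict.
    constexpr uint16_t kConflictId = 0;

    // Mark every register as Conflict except the return operand; a wide
    // return value occupies both vsrc and vsrc + 1.
    void MarkConflictsAtReturn(std::vector<uint16_t>& line, uint32_t vsrc, bool wide) {
      for (uint32_t i = 0; i < line.size(); ++i) {
        const bool is_operand = (i == vsrc) || (wide && i == vsrc + 1);
        if (!is_operand) {
          line[i] = kConflictId;
        }
      }
    }

    int main() {
      // Made-up type ids for v0..v4 just before a "return-object v3".
      std::vector<uint16_t> line = {5, 7, 7, 9, 3};
      MarkConflictsAtReturn(line, /*vsrc=*/3, /*wide=*/false);
      for (uint16_t id : line) {
        std::cout << id << ' ';  // prints: 0 0 0 9 0
      }
      std::cout << '\n';
      return 0;
    }

In the actual change, RETURN_VOID and RETURN_VOID_BARRIER mark every register, and the same marking is applied when merging into a return target so dead values never reach the saved register line.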

Change-Id: Ic3b62db7c040853a5ddfed589f6e0acff25d82b7
diff --git a/runtime/verifier/instruction_flags.cc b/runtime/verifier/instruction_flags.cc
index 358791d..f76c226 100644
--- a/runtime/verifier/instruction_flags.cc
+++ b/runtime/verifier/instruction_flags.cc
@@ -22,16 +22,17 @@
 namespace verifier {
 
 std::string InstructionFlags::ToString() const {
-  char encoding[6];
+  char encoding[7];
   if (!IsOpcode()) {
-    strncpy(encoding, "XXXXX", sizeof(encoding));
+    strncpy(encoding, "XXXXXX", sizeof(encoding));
   } else {
-    strncpy(encoding, "-----", sizeof(encoding));
-    if (IsInTry())        encoding[kInTry] = 'T';
-    if (IsBranchTarget()) encoding[kBranchTarget] = 'B';
+    strncpy(encoding, "------", sizeof(encoding));
+    if (IsVisited())               encoding[kVisited] = 'V';
+    if (IsChanged())               encoding[kChanged] = 'C';
+    if (IsInTry())                 encoding[kInTry] = 'T';
+    if (IsBranchTarget())          encoding[kBranchTarget] = 'B';
     if (IsCompileTimeInfoPoint())  encoding[kCompileTimeInfoPoint] = 'G';
-    if (IsVisited())      encoding[kVisited] = 'V';
-    if (IsChanged())      encoding[kChanged] = 'C';
+    if (IsReturn())                encoding[kReturn] = 'R';
   }
   return encoding;
 }
diff --git a/runtime/verifier/instruction_flags.h b/runtime/verifier/instruction_flags.h
index 9b2e595..e50ba13 100644
--- a/runtime/verifier/instruction_flags.h
+++ b/runtime/verifier/instruction_flags.h
@@ -93,6 +93,21 @@
     return IsVisited() || IsChanged();
   }
 
+  void SetReturn() {
+    flags_ |= 1 << kReturn;
+  }
+  void ClearReturn() {
+    flags_ &= ~(1 << kReturn);
+  }
+  bool IsReturn() const {
+    return (flags_ & (1 << kReturn)) != 0;
+  }
+
+  void SetCompileTimeInfoPointAndReturn() {
+    SetCompileTimeInfoPoint();
+    SetReturn();
+  }
+
   std::string ToString() const;
 
  private:
@@ -108,6 +123,8 @@
     kBranchTarget = 3,
     // Location of interest to the compiler for GC maps and verifier based method sharpening.
     kCompileTimeInfoPoint = 4,
+    // A return instruction.
+    kReturn = 5,
   };
 
   // Size of instruction in code units.
diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc
index 2bf78d8..e182af7 100644
--- a/runtime/verifier/method_verifier.cc
+++ b/runtime/verifier/method_verifier.cc
@@ -566,8 +566,10 @@
     /* Flag instructions that are garbage collection points */
     // All invoke points are marked as "Throw" points already.
     // We are relying on this to also count all the invokes as interesting.
-    if (inst->IsBranch() || inst->IsSwitch() || inst->IsThrow() || inst->IsReturn()) {
+    if (inst->IsBranch() || inst->IsSwitch() || inst->IsThrow()) {
       insn_flags_[dex_pc].SetCompileTimeInfoPoint();
+    } else if (inst->IsReturn()) {
+      insn_flags_[dex_pc].SetCompileTimeInfoPointAndReturn();
     }
     dex_pc += inst->SizeInCodeUnits();
     inst = inst->Next();
@@ -2656,6 +2658,20 @@
       // Make workline consistent with fallthrough computed from peephole optimization.
       work_line_->CopyFromLine(fallthrough_line.get());
     }
+    if (insn_flags_[next_insn_idx].IsReturn()) {
+      // For returns we only care about the operand to the return; all other registers are dead.
+      const Instruction* ret_inst = Instruction::At(code_item_->insns_ + next_insn_idx);
+      Instruction::Code opcode = ret_inst->Opcode();
+      if ((opcode == Instruction::RETURN_VOID) || (opcode == Instruction::RETURN_VOID_BARRIER)) {
+        work_line_->MarkAllRegistersAsConflicts();
+      } else {
+        if (opcode == Instruction::RETURN_WIDE) {
+          work_line_->MarkAllRegistersAsConflictsExceptWide(ret_inst->VRegA_11x());
+        } else {
+          work_line_->MarkAllRegistersAsConflictsExcept(ret_inst->VRegA_11x());
+        }
+      }
+    }
     RegisterLine* next_line = reg_table_.GetLine(next_insn_idx);
     if (next_line != NULL) {
       // Merge registers into what we have for the next instruction,
@@ -3643,7 +3659,24 @@
      * there's nothing to "merge". Copy the registers over and mark it as changed. (This is the
      * only way a register can transition out of "unknown", so this is not just an optimization.)
      */
-    target_line->CopyFromLine(merge_line);
+    if (!insn_flags_[next_insn].IsReturn()) {
+      target_line->CopyFromLine(merge_line);
+    } else {
+      // For returns we only care about the operand to the return; all other registers are dead.
+      // Initialize them as conflicts so they don't add to GC and deoptimization information.
+      const Instruction* ret_inst = Instruction::At(code_item_->insns_ + next_insn);
+      Instruction::Code opcode = ret_inst->Opcode();
+      if ((opcode == Instruction::RETURN_VOID) || (opcode == Instruction::RETURN_VOID_BARRIER)) {
+        target_line->MarkAllRegistersAsConflicts();
+      } else {
+        target_line->CopyFromLine(merge_line);
+        if (opcode == Instruction::RETURN_WIDE) {
+          target_line->MarkAllRegistersAsConflictsExceptWide(ret_inst->VRegA_11x());
+        } else {
+          target_line->MarkAllRegistersAsConflictsExcept(ret_inst->VRegA_11x());
+        }
+      }
+    }
   } else {
     UniquePtr<RegisterLine> copy(gDebugVerify ? new RegisterLine(target_line->NumRegs(), this) : NULL);
     if (gDebugVerify) {
diff --git a/runtime/verifier/register_line.cc b/runtime/verifier/register_line.cc
index d2abaac..7965c06 100644
--- a/runtime/verifier/register_line.cc
+++ b/runtime/verifier/register_line.cc
@@ -167,7 +167,7 @@
   DCHECK(uninit_type.IsUninitializedTypes());
   const RegType& init_type = verifier_->GetRegTypeCache()->FromUninitialized(uninit_type);
   size_t changed = 0;
-  for (size_t i = 0; i < num_regs_; i++) {
+  for (uint32_t i = 0; i < num_regs_; i++) {
     if (GetRegisterType(i).Equals(uninit_type)) {
       line_[i] = init_type.GetId();
       changed++;
@@ -176,6 +176,31 @@
   DCHECK_GT(changed, 0u);
 }
 
+void RegisterLine::MarkAllRegistersAsConflicts() {
+  uint16_t conflict_type_id = verifier_->GetRegTypeCache()->Conflict().GetId();
+  for (uint32_t i = 0; i < num_regs_; i++) {
+    line_[i] = conflict_type_id;
+  }
+}
+
+void RegisterLine::MarkAllRegistersAsConflictsExcept(uint32_t vsrc) {
+  uint16_t conflict_type_id = verifier_->GetRegTypeCache()->Conflict().GetId();
+  for (uint32_t i = 0; i < num_regs_; i++) {
+    if (i != vsrc) {
+      line_[i] = conflict_type_id;
+    }
+  }
+}
+
+void RegisterLine::MarkAllRegistersAsConflictsExceptWide(uint32_t vsrc) {
+  uint16_t conflict_type_id = verifier_->GetRegTypeCache()->Conflict().GetId();
+  for (uint32_t i = 0; i < num_regs_; i++) {
+    if ((i != vsrc) && (i != (vsrc + 1))) {
+      line_[i] = conflict_type_id;
+    }
+  }
+}
+
 std::string RegisterLine::Dump() const {
   std::string result;
   for (size_t i = 0; i < num_regs_; i++) {
diff --git a/runtime/verifier/register_line.h b/runtime/verifier/register_line.h
index cde7b9b..f380877 100644
--- a/runtime/verifier/register_line.h
+++ b/runtime/verifier/register_line.h
@@ -141,6 +141,13 @@
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   /*
+   * Update all registers to be Conflict, or every register except the given vsrc (and vsrc + 1 for the wide variant).
+   */
+  void MarkAllRegistersAsConflicts();
+  void MarkAllRegistersAsConflictsExcept(uint32_t vsrc);
+  void MarkAllRegistersAsConflictsExceptWide(uint32_t vsrc);
+
+  /*
    * Check constraints on constructor return. Specifically, make sure that the "this" argument got
    * initialized.
    * The "this" argument to <init> uses code offset kUninitThisArgAddr, which puts it at the start
diff --git a/test/ReferenceMap/stack_walk_refmap_jni.cc b/test/ReferenceMap/stack_walk_refmap_jni.cc
index 7b6483d..885a658 100644
--- a/test/ReferenceMap/stack_walk_refmap_jni.cc
+++ b/test/ReferenceMap/stack_walk_refmap_jni.cc
@@ -103,7 +103,9 @@
       //   0024: move-object v3, v2
       //   0025: goto 0013
      // Detailed dex instructions for ReferenceMap.java are at the end of this function.
-      CHECK_REGS_CONTAIN_REFS(8, 3, 2, 1);  // v8: this, v3: y, v2: y, v1: x
+      // CHECK_REGS_CONTAIN_REFS(8, 3, 2, 1);  // v8: this, v3: y, v2: y, v1: x
+      // We eliminate the non-live registers at a return, so only v3 is live:
+      CHECK_REGS_CONTAIN_REFS(3);  // v3: y
 
       ref_bitmap = map.FindBitMap(m->NativePcOffset(m->ToFirstNativeSafepointPc(0x18U)));
       CHECK(ref_bitmap);
@@ -188,7 +190,7 @@
 //0:[Unknown],1:[Reference: java.lang.Object[]],2:[Zero],3:[Reference: java.lang.Object],4:[32-bit Constant: 2],5:[Unknown],6:[32-bit Constant: 1],7:[Zero],8:[Reference: ReferenceMap],
 //      |0010: +invoke-virtual-quick {v8, v7}, [000c] // vtable #000c
 
-//0:[Conflict],1:[Reference: java.lang.Object[]],2:[Reference: java.lang.Object],3:[Reference: java.lang.Object],4:[32-bit Constant: 2],5:[Conflict],6:[32-bit Constant: 1],7:[Zero],8:[Reference: ReferenceMap],
+//0:[Conflict],1:[Conflict],2:[Conflict],3:[Reference: java.lang.Object],4:[Conflict],5:[Conflict],6:[Conflict],7:[Conflict],8:[Conflict],
 //      |0013: return-object v3
 //      |0014: move-exception v0