Fixes to mem_map wraparound and ARM64 quick_invoke assembly

There are only 6 free GPRs for passing arguments in a non-static invoke,
but the register-full check compared against 7*12; the off-by-one bound
corrupted one register for methods with long signatures. Use 6*12.

The low-memory retry in mem_map did not actually wrap around: it reset
next_mem_pos_ instead of the local search pointer, so the current scan
kept going from the old position. Reset ptr to restart the scan from
LOW_MEM_START.

Change-Id: I62658dadeb83bb22960b9455e211d26ffaa20f6f
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 2d64e7f..9db07f8 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -453,7 +453,7 @@
     cmp w17, #'J'           // is this a long?
     bne .LisOther
 
-    cmp x8, # 7*12          // Skip this load if all registers full.
+    cmp x8, # 6*12          // Skip this load if all registers full.
     beq .LfillRegisters
 
     add x17, x12, x8        // Calculate subroutine to jump to.
@@ -461,7 +461,7 @@
 
 
 .LisOther:                  // Everything else takes one vReg.
-    cmp x8, # 7*12          // Skip this load if all registers full.
+    cmp x8, # 6*12          // Skip this load if all registers full.
     beq .LfillRegisters
     add x17, x11, x8        // Calculate subroutine to jump to.
     br x17
diff --git a/runtime/mem_map.cc b/runtime/mem_map.cc
index 0af25e7..9f01189 100644
--- a/runtime/mem_map.cc
+++ b/runtime/mem_map.cc
@@ -143,7 +143,7 @@
         // Not enough memory until 4GB.
         if (first_run) {
           // Try another time from the bottom;
-          next_mem_pos_ = LOW_MEM_START;
+          ptr = LOW_MEM_START - kPageSize;
           first_run = false;
           continue;
         } else {