Fixes to build against the new VIXL interface.

- Fix namespace usage and update calls to deprecated functions.
- Link all dependants to the new libvixl-arm64 target for now.
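
For reference, the interface changes reduce to the pattern below (a
minimal before/after sketch distilled from the hunks that follow;
`code` stands in for any ART register code):

  Before (flat vixl namespace, unprefixed accessors):
    #include "vixl/a64/macro-assembler-a64.h"
    vixl::Register reg = vixl::Register::XRegFromCode(code);
    int reg_code = reg.code();

  After (everything lives in vixl::aarch64; accessors gain a Get prefix,
  e.g. GetCode, GetXRegFromCode, GetOffset):
    #include "a64/macro-assembler-a64.h"
    vixl::aarch64::Register reg = vixl::aarch64::Register::GetXRegFromCode(code);
    int reg_code = reg.GetCode();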

Change-Id: Iee6f299784fd663fc2a759f3ee816fdbc511e509
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index a849448..d2afa5b 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -21,8 +21,9 @@
 #include "locations.h"
 #include "nodes.h"
 #include "utils/arm64/assembler_arm64.h"
-#include "vixl/a64/disasm-a64.h"
-#include "vixl/a64/macro-assembler-a64.h"
+
+#include "a64/disasm-a64.h"
+#include "a64/macro-assembler-a64.h"
 
 namespace art {
 namespace arm64 {
@@ -34,87 +35,88 @@
 
 static inline int VIXLRegCodeFromART(int code) {
   if (code == SP) {
-    return vixl::kSPRegInternalCode;
+    return vixl::aarch64::kSPRegInternalCode;
   }
   if (code == XZR) {
-    return vixl::kZeroRegCode;
+    return vixl::aarch64::kZeroRegCode;
   }
   return code;
 }
 
 static inline int ARTRegCodeFromVIXL(int code) {
-  if (code == vixl::kSPRegInternalCode) {
+  if (code == vixl::aarch64::kSPRegInternalCode) {
     return SP;
   }
-  if (code == vixl::kZeroRegCode) {
+  if (code == vixl::aarch64::kZeroRegCode) {
     return XZR;
   }
   return code;
 }
 
-static inline vixl::Register XRegisterFrom(Location location) {
+static inline vixl::aarch64::Register XRegisterFrom(Location location) {
   DCHECK(location.IsRegister()) << location;
-  return vixl::Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
+  return vixl::aarch64::Register::GetXRegFromCode(VIXLRegCodeFromART(location.reg()));
 }
 
-static inline vixl::Register WRegisterFrom(Location location) {
+static inline vixl::aarch64::Register WRegisterFrom(Location location) {
   DCHECK(location.IsRegister()) << location;
-  return vixl::Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
+  return vixl::aarch64::Register::GetWRegFromCode(VIXLRegCodeFromART(location.reg()));
 }
 
-static inline vixl::Register RegisterFrom(Location location, Primitive::Type type) {
+static inline vixl::aarch64::Register RegisterFrom(Location location, Primitive::Type type) {
   DCHECK(type != Primitive::kPrimVoid && !Primitive::IsFloatingPointType(type)) << type;
   return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
 }
 
-static inline vixl::Register OutputRegister(HInstruction* instr) {
+static inline vixl::aarch64::Register OutputRegister(HInstruction* instr) {
   return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
 }
 
-static inline vixl::Register InputRegisterAt(HInstruction* instr, int input_index) {
+static inline vixl::aarch64::Register InputRegisterAt(HInstruction* instr, int input_index) {
   return RegisterFrom(instr->GetLocations()->InAt(input_index),
                       instr->InputAt(input_index)->GetType());
 }
 
-static inline vixl::FPRegister DRegisterFrom(Location location) {
+static inline vixl::aarch64::FPRegister DRegisterFrom(Location location) {
   DCHECK(location.IsFpuRegister()) << location;
-  return vixl::FPRegister::DRegFromCode(location.reg());
+  return vixl::aarch64::FPRegister::GetDRegFromCode(location.reg());
 }
 
-static inline vixl::FPRegister SRegisterFrom(Location location) {
+static inline vixl::aarch64::FPRegister SRegisterFrom(Location location) {
   DCHECK(location.IsFpuRegister()) << location;
-  return vixl::FPRegister::SRegFromCode(location.reg());
+  return vixl::aarch64::FPRegister::GetSRegFromCode(location.reg());
 }
 
-static inline vixl::FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
+static inline vixl::aarch64::FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
   DCHECK(Primitive::IsFloatingPointType(type)) << type;
   return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
 }
 
-static inline vixl::FPRegister OutputFPRegister(HInstruction* instr) {
+static inline vixl::aarch64::FPRegister OutputFPRegister(HInstruction* instr) {
   return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
 }
 
-static inline vixl::FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
+static inline vixl::aarch64::FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
   return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
                         instr->InputAt(input_index)->GetType());
 }
 
-static inline vixl::CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
-  return Primitive::IsFloatingPointType(type) ? vixl::CPURegister(FPRegisterFrom(location, type))
-                                              : vixl::CPURegister(RegisterFrom(location, type));
+static inline vixl::aarch64::CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
+  return Primitive::IsFloatingPointType(type)
+      ? vixl::aarch64::CPURegister(FPRegisterFrom(location, type))
+      : vixl::aarch64::CPURegister(RegisterFrom(location, type));
 }
 
-static inline vixl::CPURegister OutputCPURegister(HInstruction* instr) {
+static inline vixl::aarch64::CPURegister OutputCPURegister(HInstruction* instr) {
   return Primitive::IsFloatingPointType(instr->GetType())
-      ? static_cast<vixl::CPURegister>(OutputFPRegister(instr))
-      : static_cast<vixl::CPURegister>(OutputRegister(instr));
+      ? static_cast<vixl::aarch64::CPURegister>(OutputFPRegister(instr))
+      : static_cast<vixl::aarch64::CPURegister>(OutputRegister(instr));
 }
 
-static inline vixl::CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
+static inline vixl::aarch64::CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
   return Primitive::IsFloatingPointType(instr->InputAt(index)->GetType())
-      ? static_cast<vixl::CPURegister>(InputFPRegisterAt(instr, index))
-      : static_cast<vixl::CPURegister>(InputRegisterAt(instr, index));
+      ? static_cast<vixl::aarch64::CPURegister>(InputFPRegisterAt(instr, index))
+      : static_cast<vixl::aarch64::CPURegister>(InputRegisterAt(instr, index));
 }
 
 static inline int64_t Int64ConstantFrom(Location location) {
@@ -129,63 +131,70 @@
   }
 }
 
-static inline vixl::Operand OperandFrom(Location location, Primitive::Type type) {
+static inline vixl::aarch64::Operand OperandFrom(Location location, Primitive::Type type) {
   if (location.IsRegister()) {
-    return vixl::Operand(RegisterFrom(location, type));
+    return vixl::aarch64::Operand(RegisterFrom(location, type));
   } else {
-    return vixl::Operand(Int64ConstantFrom(location));
+    return vixl::aarch64::Operand(Int64ConstantFrom(location));
   }
 }
 
-static inline vixl::Operand InputOperandAt(HInstruction* instr, int input_index) {
+static inline vixl::aarch64::Operand InputOperandAt(HInstruction* instr, int input_index) {
   return OperandFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
 }
 
-static inline vixl::MemOperand StackOperandFrom(Location location) {
-  return vixl::MemOperand(vixl::sp, location.GetStackIndex());
+static inline vixl::aarch64::MemOperand StackOperandFrom(Location location) {
+  return vixl::aarch64::MemOperand(vixl::aarch64::sp, location.GetStackIndex());
 }
 
-static inline vixl::MemOperand HeapOperand(const vixl::Register& base, size_t offset = 0) {
+static inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
+                                                    size_t offset = 0) {
  // A heap reference must be 32 bits, so it fits in a W register.
   DCHECK(base.IsW());
-  return vixl::MemOperand(base.X(), offset);
+  return vixl::aarch64::MemOperand(base.X(), offset);
 }
 
-static inline vixl::MemOperand HeapOperand(const vixl::Register& base,
-                                           const vixl::Register& regoffset,
-                                           vixl::Shift shift = vixl::LSL,
-                                           unsigned shift_amount = 0) {
+static inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
+                                                    const vixl::aarch64::Register& regoffset,
+                                                    vixl::aarch64::Shift shift = vixl::aarch64::LSL,
+                                                    unsigned shift_amount = 0) {
  // A heap reference must be 32 bits, so it fits in a W register.
   DCHECK(base.IsW());
-  return vixl::MemOperand(base.X(), regoffset, shift, shift_amount);
+  return vixl::aarch64::MemOperand(base.X(), regoffset, shift, shift_amount);
 }
 
-static inline vixl::MemOperand HeapOperand(const vixl::Register& base, Offset offset) {
+static inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
+                                                    Offset offset) {
   return HeapOperand(base, offset.SizeValue());
 }
 
-static inline vixl::MemOperand HeapOperandFrom(Location location, Offset offset) {
+static inline vixl::aarch64::MemOperand HeapOperandFrom(Location location, Offset offset) {
   return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset);
 }
 
-static inline Location LocationFrom(const vixl::Register& reg) {
-  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
+static inline Location LocationFrom(const vixl::aarch64::Register& reg) {
+  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.GetCode()));
 }
 
-static inline Location LocationFrom(const vixl::FPRegister& fpreg) {
-  return Location::FpuRegisterLocation(fpreg.code());
+static inline Location LocationFrom(const vixl::aarch64::FPRegister& fpreg) {
+  return Location::FpuRegisterLocation(fpreg.GetCode());
 }
 
-static inline vixl::Operand OperandFromMemOperand(const vixl::MemOperand& mem_op) {
+static inline vixl::aarch64::Operand OperandFromMemOperand(
+    const vixl::aarch64::MemOperand& mem_op) {
   if (mem_op.IsImmediateOffset()) {
-    return vixl::Operand(mem_op.offset());
+    return vixl::aarch64::Operand(mem_op.GetOffset());
   } else {
     DCHECK(mem_op.IsRegisterOffset());
-    if (mem_op.extend() != vixl::NO_EXTEND) {
-      return vixl::Operand(mem_op.regoffset(), mem_op.extend(), mem_op.shift_amount());
-    } else if (mem_op.shift() != vixl::NO_SHIFT) {
-      return vixl::Operand(mem_op.regoffset(), mem_op.shift(), mem_op.shift_amount());
+    if (mem_op.GetExtend() != vixl::aarch64::NO_EXTEND) {
+      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
+                                    mem_op.GetExtend(),
+                                    mem_op.GetShiftAmount());
+    } else if (mem_op.GetShift() != vixl::aarch64::NO_SHIFT) {
+      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
+                                    mem_op.GetShift(),
+                                    mem_op.GetShiftAmount());
     } else {
       LOG(FATAL) << "Should not reach here";
       UNREACHABLE();
@@ -212,10 +221,10 @@
 
   if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
     // Uses logical operations.
-    return vixl::Assembler::IsImmLogical(value, vixl::kXRegSize);
+    return vixl::aarch64::Assembler::IsImmLogical(value, vixl::aarch64::kXRegSize);
   } else if (instr->IsNeg()) {
     // Uses mov -immediate.
-    return vixl::Assembler::IsImmMovn(value, vixl::kXRegSize);
+    return vixl::aarch64::Assembler::IsImmMovn(value, vixl::aarch64::kXRegSize);
   } else {
     DCHECK(instr->IsAdd() ||
            instr->IsArm64IntermediateAddress() ||
@@ -227,7 +236,8 @@
     // Uses aliases of ADD/SUB instructions.
     // If `value` does not fit but `-value` does, VIXL will automatically use
     // the 'opposite' instruction.
-    return vixl::Assembler::IsImmAddSub(value) || vixl::Assembler::IsImmAddSub(-value);
+    return vixl::aarch64::Assembler::IsImmAddSub(value)
+        || vixl::aarch64::Assembler::IsImmAddSub(-value);
   }
 }
 
@@ -263,30 +273,30 @@
   return true;
 }
 
-static inline vixl::Shift ShiftFromOpKind(HArm64DataProcWithShifterOp::OpKind op_kind) {
+static inline vixl::aarch64::Shift ShiftFromOpKind(HArm64DataProcWithShifterOp::OpKind op_kind) {
   switch (op_kind) {
-    case HArm64DataProcWithShifterOp::kASR: return vixl::ASR;
-    case HArm64DataProcWithShifterOp::kLSL: return vixl::LSL;
-    case HArm64DataProcWithShifterOp::kLSR: return vixl::LSR;
+    case HArm64DataProcWithShifterOp::kASR: return vixl::aarch64::ASR;
+    case HArm64DataProcWithShifterOp::kLSL: return vixl::aarch64::LSL;
+    case HArm64DataProcWithShifterOp::kLSR: return vixl::aarch64::LSR;
     default:
       LOG(FATAL) << "Unexpected op kind " << op_kind;
       UNREACHABLE();
-      return vixl::NO_SHIFT;
+      return vixl::aarch64::NO_SHIFT;
   }
 }
 
-static inline vixl::Extend ExtendFromOpKind(HArm64DataProcWithShifterOp::OpKind op_kind) {
+static inline vixl::aarch64::Extend ExtendFromOpKind(HArm64DataProcWithShifterOp::OpKind op_kind) {
   switch (op_kind) {
-    case HArm64DataProcWithShifterOp::kUXTB: return vixl::UXTB;
-    case HArm64DataProcWithShifterOp::kUXTH: return vixl::UXTH;
-    case HArm64DataProcWithShifterOp::kUXTW: return vixl::UXTW;
-    case HArm64DataProcWithShifterOp::kSXTB: return vixl::SXTB;
-    case HArm64DataProcWithShifterOp::kSXTH: return vixl::SXTH;
-    case HArm64DataProcWithShifterOp::kSXTW: return vixl::SXTW;
+    case HArm64DataProcWithShifterOp::kUXTB: return vixl::aarch64::UXTB;
+    case HArm64DataProcWithShifterOp::kUXTH: return vixl::aarch64::UXTH;
+    case HArm64DataProcWithShifterOp::kUXTW: return vixl::aarch64::UXTW;
+    case HArm64DataProcWithShifterOp::kSXTB: return vixl::aarch64::SXTB;
+    case HArm64DataProcWithShifterOp::kSXTH: return vixl::aarch64::SXTH;
+    case HArm64DataProcWithShifterOp::kSXTW: return vixl::aarch64::SXTW;
     default:
       LOG(FATAL) << "Unexpected op kind " << op_kind;
       UNREACHABLE();
-      return vixl::NO_EXTEND;
+      return vixl::aarch64::NO_EXTEND;
   }
 }