Re-apply: Initial check-in of an optimizing compiler.

The classes and names are heavily inspired by V8/Dart.
It currently supports only the RETURN_VOID dex instruction,
and a pretty printer is included to check that the graph is
built correctly.

Change-Id: I28e125dfee86ae6ec9b3fec6aa1859523b92a893
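
For context, the allocation pattern these utilities enable looks
roughly like this (a sketch; MyNode is a hypothetical stand-in for
the real graph classes added elsewhere in this change):

    #include "utils/allocation.h"
    #include "utils/arena_allocator.h"

    namespace art {

    // Hypothetical node class: deriving from ArenaObject provides the
    // placement operator new and hides the global operator new.
    class MyNode : public ArenaObject {
     public:
      explicit MyNode(int id) : id_(id) {}
      int GetId() const { return id_; }

     private:
      const int id_;
    };

    void BuildSomething() {
      ArenaPool pool;
      ArenaAllocator allocator(&pool);
      // Zeroed memory from the arena; there is no matching delete, all
      // memory is reclaimed when 'allocator' goes out of scope.
      MyNode* node = new (&allocator) MyNode(42);
      (void)node->GetId();  // Sketch only; real code keeps the node in a graph.
    }

    }  // namespace art
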
diff --git a/compiler/utils/allocation.h b/compiler/utils/allocation.h
new file mode 100644
index 0000000..07cd397
--- /dev/null
+++ b/compiler/utils/allocation.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_UTILS_ALLOCATION_H_
+#define ART_COMPILER_UTILS_ALLOCATION_H_
+
+#include "arena_allocator.h"
+#include "base/logging.h"
+
+namespace art {
+
+class ArenaObject {
+ public:
+  // Allocate a new ArenaObject of 'size' bytes in the Arena.
+  void* operator new(size_t size, ArenaAllocator* allocator) {
+    return allocator->Alloc(size, ArenaAllocator::kAllocMisc);
+  }
+
+  void operator delete(void*, size_t) {
+    LOG(FATAL) << "UNREACHABLE";
+  }
+};
+
+class ValueObject {
+ public:
+  void* operator new(size_t size) {
+    LOG(FATAL) << "UNREACHABLE";
+    abort();
+  }
+  void operator delete(void*, size_t) {
+    LOG(FATAL) << "UNREACHABLE";
+  }
+};
+
+}  // namespace art
+
+#endif  // ART_COMPILER_UTILS_ALLOCATION_H_
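
A note on the two base classes: declaring the placement operator new
in ArenaObject hides the global operator new, so subclasses can only
be heap-allocated through an ArenaAllocator; ValueObject instead makes
any heap allocation a fatal error at run time, so its subclasses are
effectively stack-only. A hypothetical illustration:

    class Visitor : public ValueObject {};

    void Use() {
      Visitor v;                      // OK: lives on the stack.
      // Visitor* p = new Visitor();  // Compiles, but LOG(FATAL)s at run time.
    }
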
diff --git a/compiler/utils/arena_allocator.cc b/compiler/utils/arena_allocator.cc
new file mode 100644
index 0000000..ec41293
--- /dev/null
+++ b/compiler/utils/arena_allocator.cc
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "arena_allocator.h"
+#include "base/logging.h"
+#include "base/mutex.h"
+#include "thread-inl.h"
+#include <memcheck/memcheck.h>
+
+namespace art {
+
+// MemMap is a bit slower than malloc according to my measurements.
+static constexpr bool kUseMemMap = false;
+static constexpr bool kUseMemSet = true && kUseMemMap;
+static constexpr size_t kValgrindRedZoneBytes = 8;
+constexpr size_t Arena::kDefaultSize;
+
+static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
+  "Misc       ",
+  "BasicBlock ",
+  "LIR        ",
+  "MIR        ",
+  "DataFlow   ",
+  "GrowList   ",
+  "GrowBitMap ",
+  "Dalvik2SSA ",
+  "DebugInfo  ",
+  "Successor  ",
+  "RegAlloc   ",
+  "Data       ",
+  "Preds      ",
+};
+
+Arena::Arena(size_t size)
+    : bytes_allocated_(0),
+      map_(nullptr),
+      next_(nullptr) {
+  if (kUseMemMap) {
+    std::string error_msg;
+    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
+                                &error_msg);
+    CHECK(map_ != nullptr) << error_msg;
+    memory_ = map_->Begin();
+    size_ = map_->Size();
+  } else {
+    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
+    size_ = size;
+  }
+}
+
+Arena::~Arena() {
+  if (kUseMemMap) {
+    delete map_;
+  } else {
+    free(reinterpret_cast<void*>(memory_));
+  }
+}
+
+void Arena::Reset() {
+  if (bytes_allocated_) {
+    if (kUseMemSet || !kUseMemMap) {
+      memset(Begin(), 0, bytes_allocated_);
+    } else {
+      madvise(Begin(), bytes_allocated_, MADV_DONTNEED);
+    }
+    bytes_allocated_ = 0;
+  }
+}
+
+ArenaPool::ArenaPool()
+    : lock_("Arena pool lock"),
+      free_arenas_(nullptr) {
+}
+
+ArenaPool::~ArenaPool() {
+  while (free_arenas_ != nullptr) {
+    auto* arena = free_arenas_;
+    free_arenas_ = free_arenas_->next_;
+    delete arena;
+  }
+}
+
+Arena* ArenaPool::AllocArena(size_t size) {
+  Thread* self = Thread::Current();
+  Arena* ret = nullptr;
+  {
+    MutexLock lock(self, lock_);
+    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
+      ret = free_arenas_;
+      free_arenas_ = free_arenas_->next_;
+    }
+  }
+  if (ret == nullptr) {
+    ret = new Arena(size);
+  }
+  ret->Reset();
+  return ret;
+}
+
+void ArenaPool::FreeArena(Arena* arena) {
+  Thread* self = Thread::Current();
+  if (UNLIKELY(RUNNING_ON_VALGRIND)) {
+    VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
+  }
+  {
+    MutexLock lock(self, lock_);
+    arena->next_ = free_arenas_;
+    free_arenas_ = arena;
+  }
+}
+
+size_t ArenaAllocator::BytesAllocated() const {
+  size_t total = 0;
+  for (int i = 0; i < kNumAllocKinds; i++) {
+    total += alloc_stats_[i];
+  }
+  return total;
+}
+
+ArenaAllocator::ArenaAllocator(ArenaPool* pool)
+  : pool_(pool),
+    begin_(nullptr),
+    end_(nullptr),
+    ptr_(nullptr),
+    arena_head_(nullptr),
+    num_allocations_(0),
+    running_on_valgrind_(RUNNING_ON_VALGRIND) {
+  memset(&alloc_stats_[0], 0, sizeof(alloc_stats_));
+}
+
+void ArenaAllocator::UpdateBytesAllocated() {
+  if (arena_head_ != nullptr) {
+    // Update how many bytes we have allocated into the arena so that the arena pool knows how
+    // much memory to zero out.
+    arena_head_->bytes_allocated_ = ptr_ - begin_;
+  }
+}
+
+void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
+  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
+    // Obtain a new block.
+    ObtainNewArenaForAllocation(rounded_bytes);
+    if (UNLIKELY(ptr_ == nullptr)) {
+      return nullptr;
+    }
+  }
+  if (kCountAllocations) {
+    alloc_stats_[kind] += rounded_bytes;
+    ++num_allocations_;
+  }
+  uint8_t* ret = ptr_;
+  ptr_ += rounded_bytes;
+  // Check that the memory is already zeroed out.
+  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
+    CHECK_EQ(*ptr, 0U);
+  }
+  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
+  return ret;
+}
+
+ArenaAllocator::~ArenaAllocator() {
+  // Reclaim all the arenas by giving them back to the arena pool.
+  UpdateBytesAllocated();
+  while (arena_head_ != nullptr) {
+    Arena* arena = arena_head_;
+    arena_head_ = arena_head_->next_;
+    pool_->FreeArena(arena);
+  }
+}
+
+void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
+  UpdateBytesAllocated();
+  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
+  new_arena->next_ = arena_head_;
+  arena_head_ = new_arena;
+  // Update our internal data structures.
+  ptr_ = begin_ = new_arena->Begin();
+  end_ = new_arena->End();
+}
+
+// Dump memory usage stats.
+void ArenaAllocator::DumpMemStats(std::ostream& os) const {
+  size_t malloc_bytes = 0;
+  // Start out with how many lost bytes we have in the arena we are currently allocating into.
+  size_t lost_bytes = end_ - ptr_;
+  size_t num_arenas = 0;
+  for (Arena* arena = arena_head_; arena != nullptr; arena = arena->next_) {
+    malloc_bytes += arena->Size();
+    if (arena != arena_head_) {
+      lost_bytes += arena->RemainingSpace();
+    }
+    ++num_arenas;
+  }
+  const size_t bytes_allocated = BytesAllocated();
+  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
+     << ", lost: " << lost_bytes << "\n";
+  if (num_allocations_ != 0) {
+    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
+       << num_allocations_ << ", avg size: " << bytes_allocated / num_allocations_ << "\n";
+  }
+  os << "===== Allocation by kind\n";
+  for (int i = 0; i < kNumAllocKinds; i++) {
+    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
+  }
+}
+
+}  // namespace art
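
To make the size rounding concrete: Alloc rounds every request up to a
4-byte boundary, and AllocValgrind additionally reserves
kValgrindRedZoneBytes past the payload and marks them NOACCESS. Two
worked cases, using the constants defined above:

    // bytes = 5: (5 + 3 + 8) & ~3 == 16 -> payload [0, 5), red zone [5, 16).
    // bytes = 8: (8 + 3 + 8) & ~3 == 16 -> payload [0, 8), red zone [8, 16).
    static_assert(((5u + 3u + 8u) & ~3u) == 16u, "5-byte request rounds to 16");
    static_assert(((8u + 3u + 8u) & ~3u) == 16u, "8-byte request rounds to 16");
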
diff --git a/compiler/utils/arena_allocator.h b/compiler/utils/arena_allocator.h
new file mode 100644
index 0000000..56cedfe
--- /dev/null
+++ b/compiler/utils/arena_allocator.h
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_UTILS_ARENA_ALLOCATOR_H_
+#define ART_COMPILER_UTILS_ARENA_ALLOCATOR_H_
+
+#include <stdint.h>
+#include <stddef.h>
+
+#include "base/mutex.h"
+#include "mem_map.h"
+
+namespace art {
+
+class Arena;
+class ArenaPool;
+class ArenaAllocator;
+
+class Arena {
+ public:
+  static constexpr size_t kDefaultSize = 128 * KB;
+  explicit Arena(size_t size = kDefaultSize);
+  ~Arena();
+  void Reset();
+  uint8_t* Begin() {
+    return memory_;
+  }
+
+  uint8_t* End() {
+    return memory_ + size_;
+  }
+
+  size_t Size() const {
+    return size_;
+  }
+
+  size_t RemainingSpace() const {
+    return Size() - bytes_allocated_;
+  }
+
+ private:
+  size_t bytes_allocated_;
+  uint8_t* memory_;
+  size_t size_;
+  MemMap* map_;
+  Arena* next_;
+  friend class ArenaPool;
+  friend class ArenaAllocator;
+  DISALLOW_COPY_AND_ASSIGN(Arena);
+};
+
+class ArenaPool {
+ public:
+  ArenaPool();
+  ~ArenaPool();
+  Arena* AllocArena(size_t size);
+  void FreeArena(Arena* arena);
+
+ private:
+  Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
+  Arena* free_arenas_ GUARDED_BY(lock_);
+  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
+};
+
+class ArenaAllocator {
+ public:
+  // Type of allocation for memory tuning.
+  enum ArenaAllocKind {
+    kAllocMisc,
+    kAllocBB,
+    kAllocLIR,
+    kAllocMIR,
+    kAllocDFInfo,
+    kAllocGrowableArray,
+    kAllocGrowableBitMap,
+    kAllocDalvikToSSAMap,
+    kAllocDebugInfo,
+    kAllocSuccessor,
+    kAllocRegAlloc,
+    kAllocData,
+    kAllocPredecessors,
+    kNumAllocKinds
+  };
+
+  static constexpr bool kCountAllocations = false;
+
+  explicit ArenaAllocator(ArenaPool* pool);
+  ~ArenaAllocator();
+
+  // Returns zeroed memory.
+  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
+    if (UNLIKELY(running_on_valgrind_)) {
+      return AllocValgrind(bytes, kind);
+    }
+    bytes = (bytes + 3) & ~3;
+    if (UNLIKELY(ptr_ + bytes > end_)) {
+      // Obtain a new block.
+      ObtainNewArenaForAllocation(bytes);
+      if (UNLIKELY(ptr_ == nullptr)) {
+        return nullptr;
+      }
+    }
+    if (kCountAllocations) {
+      alloc_stats_[kind] += bytes;
+      ++num_allocations_;
+    }
+    uint8_t* ret = ptr_;
+    ptr_ += bytes;
+    return ret;
+  }
+
+  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+  void ObtainNewArenaForAllocation(size_t allocation_size);
+  size_t BytesAllocated() const;
+  void DumpMemStats(std::ostream& os) const;
+
+ private:
+  void UpdateBytesAllocated();
+
+  ArenaPool* pool_;
+  uint8_t* begin_;
+  uint8_t* end_;
+  uint8_t* ptr_;
+  Arena* arena_head_;
+  size_t num_allocations_;
+  size_t alloc_stats_[kNumAllocKinds];  // Bytes used by various allocation kinds.
+  bool running_on_valgrind_;
+
+  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
+};  // ArenaAllocator
+
+struct MemStats {
+ public:
+  void Dump(std::ostream& os) const {
+    arena_.DumpMemStats(os);
+  }
+  explicit MemStats(const ArenaAllocator& arena) : arena_(arena) {}
+ private:
+  const ArenaAllocator& arena_;
+};  // MemStats
+
+}  // namespace art
+
+#endif  // ART_COMPILER_UTILS_ARENA_ALLOCATOR_H_
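
Putting the header together, direct use of the allocator looks like
this (a sketch; note that the per-kind numbers printed via MemStats
stay at zero unless kCountAllocations is set to true at compile time):

    #include <iostream>

    #include "utils/arena_allocator.h"

    namespace art {

    void AllocatorExample() {
      ArenaPool pool;
      ArenaAllocator allocator(&pool);
      // Returns zeroed memory, rounded up to a 4-byte boundary.
      void* buffer = allocator.Alloc(100, ArenaAllocator::kAllocMisc);
      (void)buffer;
      // Prints "MEM: used: ..., allocated: ..., lost: ..." and the
      // per-kind allocation table.
      MemStats stats(allocator);
      stats.Dump(std::cout);
    }

    }  // namespace art
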
diff --git a/compiler/utils/arena_allocator_test.cc b/compiler/utils/arena_allocator_test.cc
new file mode 100644
index 0000000..b76fe74
--- /dev/null
+++ b/compiler/utils/arena_allocator_test.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "dex/arena_bit_vector.h"
+#include "gtest/gtest.h"
+#include "utils/arena_allocator.h"
+
+namespace art {
+
+TEST(ArenaAllocator, Test) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  ArenaBitVector bv(&arena, 10, true);
+  bv.SetBit(5);
+  EXPECT_EQ(1U, bv.GetStorageSize());
+  bv.SetBit(35);
+  EXPECT_EQ(2U, bv.GetStorageSize());
+}
+
+}  // namespace art
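
The expected values encode the bit vector's 32-bit storage words: bit 5
fits in word 0, while bit 35 forces expansion to a second word. Under
the same word-size assumption, the exact boundary would be covered by a
test like this (hypothetical, not part of this change):

    TEST(ArenaAllocator, BitVectorWordBoundary) {
      ArenaPool pool;
      ArenaAllocator arena(&pool);
      ArenaBitVector bv(&arena, 10, true);  // Expandable bit vector.
      bv.SetBit(31);
      EXPECT_EQ(1U, bv.GetStorageSize());   // Bit 31 is the last bit of word 0.
      bv.SetBit(32);
      EXPECT_EQ(2U, bv.GetStorageSize());   // Bit 32 spills into word 1.
    }
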
diff --git a/compiler/utils/growable_array.h b/compiler/utils/growable_array.h
new file mode 100644
index 0000000..b591870
--- /dev/null
+++ b/compiler/utils/growable_array.h
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_UTILS_GROWABLE_ARRAY_H_
+#define ART_COMPILER_UTILS_GROWABLE_ARRAY_H_
+
+#include <stdint.h>
+#include <stddef.h>
+#include "arena_allocator.h"
+
+namespace art {
+
+// Type of growable list for memory tuning.
+enum OatListKind {
+  kGrowableArrayMisc = 0,
+  kGrowableArrayBlockList,
+  kGrowableArraySSAtoDalvikMap,
+  kGrowableArrayDfsOrder,
+  kGrowableArrayDfsPostOrder,
+  kGrowableArrayDomPostOrderTraversal,
+  kGrowableArrayThrowLaunchPads,
+  kGrowableArraySuspendLaunchPads,
+  kGrowableArraySwitchTables,
+  kGrowableArrayFillArrayData,
+  kGrowableArraySuccessorBlocks,
+  kGrowableArrayPredecessors,
+  kGrowableArraySlowPaths,
+  kGNumListKinds
+};
+
+template<typename T>
+class GrowableArray {
+  public:
+    class Iterator {
+      public:
+        explicit Iterator(GrowableArray* g_list)
+          : idx_(0),
+            g_list_(g_list) {}
+
+        // NOTE: returns 0/NULL when there is no next element.
+        // TODO: redo to make usage consistent with other iterators.
+        T Next() {
+          if (idx_ >= g_list_->Size()) {
+            return 0;
+          } else {
+            return g_list_->Get(idx_++);
+          }
+        }
+
+        void Reset() {
+          idx_ = 0;
+        }
+
+      private:
+        size_t idx_;
+        GrowableArray* const g_list_;
+    };
+
+    GrowableArray(ArenaAllocator* arena, size_t init_length, OatListKind kind = kGrowableArrayMisc)
+      : arena_(arena),
+        num_allocated_(init_length),
+        num_used_(0),
+        kind_(kind) {
+      elem_list_ = static_cast<T*>(arena_->Alloc(sizeof(T) * init_length,
+                                                 ArenaAllocator::kAllocGrowableArray));
+    }
+
+
+    // Expand the list size to at least 'new_length'.
+    void Resize(size_t new_length) {
+      if (new_length <= num_allocated_) return;
+      // If it's a small list double the size, else grow 1.5x.
+      size_t target_length =
+          (num_allocated_ < 128) ? num_allocated_ << 1 : num_allocated_ + (num_allocated_ >> 1);
+      if (new_length > target_length) {
+        target_length = new_length;
+      }
+      T* new_array = static_cast<T*>(arena_->Alloc(sizeof(T) * target_length,
+                                                   ArenaAllocator::kAllocGrowableArray));
+      memcpy(new_array, elem_list_, sizeof(T) * num_allocated_);
+      num_allocated_ = target_length;
+      elem_list_ = new_array;
+    }
+
+    // NOTE: does not return storage, just resets use count.
+    void Reset() {
+      num_used_ = 0;
+    }
+
+    // Insert an element at the end of the list, resizing if necessary.
+    void Insert(T elem) {
+      if (num_used_ == num_allocated_) {
+        Resize(num_used_ + 1);
+      }
+      elem_list_[num_used_++] = elem;
+    }
+
+    void InsertAt(size_t index, T elem) {
+      DCHECK(index <= Size());
+      Insert(elem);
+      for (size_t i = Size() - 1; i > index; --i) {
+        elem_list_[i] = elem_list_[i - 1];
+      }
+      elem_list_[index] = elem;
+    }
+
+    void Add(T elem) {
+      Insert(elem);
+    }
+
+    T Get(size_t index) const {
+      DCHECK_LT(index, num_used_);
+      return elem_list_[index];
+    }
+
+    // Overwrite existing element at position index.  List must be large enough.
+    void Put(size_t index, T elem) {
+      DCHECK_LT(index, num_used_);
+      elem_list_[index] = elem;
+    }
+
+    void Increment(size_t index) {
+      DCHECK_LT(index, num_used_);
+      elem_list_[index]++;
+    }
+
+    /*
+     * Remove an existing element from the list.  If there is more than one
+     * copy of the element, only the first one encountered is deleted.
+     */
+    // TODO: consider renaming this.
+    void Delete(T element) {
+      bool found = false;
+      for (size_t i = 0; i < num_used_ - 1; i++) {
+        if (!found && elem_list_[i] == element) {
+          found = true;
+        }
+        if (found) {
+          elem_list_[i] = elem_list_[i+1];
+        }
+      }
+      // We should either have found the element, or it was the last (unscanned) element.
+      DCHECK(found || (element == elem_list_[num_used_ - 1]));
+      num_used_--;
+    }
+
+    size_t GetNumAllocated() const { return num_allocated_; }
+
+    size_t Size() const { return num_used_; }
+
+    void SetSize(size_t new_size) {
+      Resize(new_size);
+      num_used_ = new_size;
+    }
+
+    T* GetRawStorage() const { return elem_list_; }
+
+    static void* operator new(size_t size, ArenaAllocator* arena) {
+      return arena->Alloc(sizeof(GrowableArray<T>), ArenaAllocator::kAllocGrowableArray);
+    }
+    static void operator delete(void* p) {}  // Nop.
+
+  private:
+    ArenaAllocator* const arena_;
+    size_t num_allocated_;
+    size_t num_used_;
+    OatListKind kind_;
+    T* elem_list_;
+};
+
+}  // namespace art
+
+#endif  // ART_COMPILER_UTILS_GROWABLE_ARRAY_H_
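
Typical use of GrowableArray, for reference (a sketch; note that the
iterator's 0/NULL sentinel means a stored value of 0 ends this style of
loop early):

    #include "utils/growable_array.h"

    namespace art {

    void GrowableArrayExample(ArenaAllocator* arena) {
      GrowableArray<int> list(arena, 4);  // Capacity 4, kGrowableArrayMisc.
      list.Insert(1);
      list.Insert(2);
      list.InsertAt(1, 7);                // list is now {1, 7, 2}.
      list.Put(0, 5);                     // list is now {5, 7, 2}.

      GrowableArray<int>::Iterator it(&list);
      for (int value = it.Next(); value != 0; value = it.Next()) {
        // Visits 5, 7, 2 in order.
      }
    }

    }  // namespace art

Since Resize doubles capacities below 128 and grows by 1.5x above that,
repeated Insert calls on a full list of capacity 4 move it to 8, then
16, and so on. The old storage is not returned to the arena, only
abandoned, which is the usual arena trade-off.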