Move CodeGenerationData into its own header file.
This makes it easier to run compiler experiments.
Test: test.py
Change-Id: I48943e6bcedb9023abf90ce6e6ea9b94866309d1
diff --git a/compiler/Android.bp b/compiler/Android.bp
index de8ab1f..d667de8 100644
--- a/compiler/Android.bp
+++ b/compiler/Android.bp
@@ -46,6 +46,7 @@
"optimizing/bounds_check_elimination.cc",
"optimizing/builder.cc",
"optimizing/cha_guard_optimization.cc",
+ "optimizing/code_generation_data.cc",
"optimizing/code_generator.cc",
"optimizing/code_generator_utils.cc",
"optimizing/code_sinking.cc",
diff --git a/compiler/optimizing/code_generation_data.cc b/compiler/optimizing/code_generation_data.cc
new file mode 100644
index 0000000..7b23d46
--- /dev/null
+++ b/compiler/optimizing/code_generation_data.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "class_linker.h"
+#include "code_generation_data.h"
+#include "code_generator.h"
+#include "intern_table.h"
+#include "mirror/object-inl.h"
+#include "runtime.h"
+
+namespace art HIDDEN {
+
+void CodeGenerationData::EmitJitRoots(
+ /*out*/std::vector<Handle<mirror::Object>>* roots) {
+ DCHECK(roots->empty());
+ roots->reserve(GetNumberOfJitRoots());
+ ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+ size_t index = 0;
+ for (auto& entry : jit_string_roots_) {
+ // Update the `roots` with the string, and replace the address temporarily
+ // stored to the index in the table.
+ uint64_t address = entry.second;
+ roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
+ DCHECK(roots->back() != nullptr);
+ DCHECK(roots->back()->IsString());
+ entry.second = index;
+ // Ensure the string is strongly interned. This is a requirement on how the JIT
+ // handles strings. b/32995596
+ class_linker->GetInternTable()->InternStrong(roots->back()->AsString());
+ ++index;
+ }
+ for (auto& entry : jit_class_roots_) {
+ // Update the `roots` with the class, and replace the address temporarily
+ // stored to the index in the table.
+ uint64_t address = entry.second;
+ roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
+ DCHECK(roots->back() != nullptr);
+ DCHECK(roots->back()->IsClass());
+ entry.second = index;
+ ++index;
+ }
+}
+
+} // namespace art
diff --git a/compiler/optimizing/code_generation_data.h b/compiler/optimizing/code_generation_data.h
new file mode 100644
index 0000000..0c6b483
--- /dev/null
+++ b/compiler/optimizing/code_generation_data.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATION_DATA_H_
+#define ART_COMPILER_OPTIMIZING_CODE_GENERATION_DATA_H_
+
+#include "arch/instruction_set.h"
+#include "base/scoped_arena_allocator.h"
+#include "base/scoped_arena_containers.h"
+#include "dex/string_reference.h"
+#include "dex/type_reference.h"
+#include "handle.h"
+#include "mirror/class.h"
+#include "mirror/object.h"
+#include "mirror/string.h"
+#include "stack_map_stream.h"
+
+namespace art HIDDEN {
+
+class SlowPathCode;
+
+class CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator> {
+ public:
+ static std::unique_ptr<CodeGenerationData> Create(ArenaStack* arena_stack,
+ InstructionSet instruction_set) {
+ ScopedArenaAllocator allocator(arena_stack);
+ void* memory = allocator.Alloc<CodeGenerationData>(kArenaAllocCodeGenerator);
+ return std::unique_ptr<CodeGenerationData>(
+ ::new (memory) CodeGenerationData(std::move(allocator), instruction_set));
+ }
+
+ ScopedArenaAllocator* GetScopedAllocator() {
+ return &allocator_;
+ }
+
+ void AddSlowPath(SlowPathCode* slow_path) {
+ slow_paths_.emplace_back(std::unique_ptr<SlowPathCode>(slow_path));
+ }
+
+ ArrayRef<const std::unique_ptr<SlowPathCode>> GetSlowPaths() const {
+ return ArrayRef<const std::unique_ptr<SlowPathCode>>(slow_paths_);
+ }
+
+ StackMapStream* GetStackMapStream() { return &stack_map_stream_; }
+
+ void ReserveJitStringRoot(StringReference string_reference, Handle<mirror::String> string) {
+ jit_string_roots_.Overwrite(string_reference,
+ reinterpret_cast64<uint64_t>(string.GetReference()));
+ }
+
+ uint64_t GetJitStringRootIndex(StringReference string_reference) const {
+ return jit_string_roots_.Get(string_reference);
+ }
+
+ size_t GetNumberOfJitStringRoots() const {
+ return jit_string_roots_.size();
+ }
+
+ void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass) {
+ jit_class_roots_.Overwrite(type_reference, reinterpret_cast64<uint64_t>(klass.GetReference()));
+ }
+
+ uint64_t GetJitClassRootIndex(TypeReference type_reference) const {
+ return jit_class_roots_.Get(type_reference);
+ }
+
+ size_t GetNumberOfJitClassRoots() const {
+ return jit_class_roots_.size();
+ }
+
+ size_t GetNumberOfJitRoots() const {
+ return GetNumberOfJitStringRoots() + GetNumberOfJitClassRoots();
+ }
+
+ void EmitJitRoots(/*out*/std::vector<Handle<mirror::Object>>* roots)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+ CodeGenerationData(ScopedArenaAllocator&& allocator, InstructionSet instruction_set)
+ : allocator_(std::move(allocator)),
+ stack_map_stream_(&allocator_, instruction_set),
+ slow_paths_(allocator_.Adapter(kArenaAllocCodeGenerator)),
+ jit_string_roots_(StringReferenceValueComparator(),
+ allocator_.Adapter(kArenaAllocCodeGenerator)),
+ jit_class_roots_(TypeReferenceValueComparator(),
+ allocator_.Adapter(kArenaAllocCodeGenerator)) {
+ slow_paths_.reserve(kDefaultSlowPathsCapacity);
+ }
+
+ static constexpr size_t kDefaultSlowPathsCapacity = 8;
+
+ ScopedArenaAllocator allocator_;
+ StackMapStream stack_map_stream_;
+ ScopedArenaVector<std::unique_ptr<SlowPathCode>> slow_paths_;
+
+ // Maps a StringReference (dex_file, string_index) to the index in the literal table.
+ // Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
+ // will compute all the indices.
+ ScopedArenaSafeMap<StringReference, uint64_t, StringReferenceValueComparator> jit_string_roots_;
+
+ // Maps a ClassReference (dex_file, type_index) to the index in the literal table.
+ // Entries are initially added with a pointer in the handle zone, and `EmitJitRoots`
+ // will compute all the indices.
+ ScopedArenaSafeMap<TypeReference, uint64_t, TypeReferenceValueComparator> jit_class_roots_;
+};
+
+} // namespace art
+
+#endif // ART_COMPILER_OPTIMIZING_CODE_GENERATION_DATA_H_
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index cc8cb89..78aa328 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -44,6 +44,7 @@
#include "base/leb128.h"
#include "class_linker.h"
#include "class_root-inl.h"
+#include "code_generation_data.h"
#include "dex/bytecode_utils.h"
#include "dex/code_item_accessors-inl.h"
#include "graph_visualizer.h"
@@ -141,122 +142,6 @@
return true;
}
-class CodeGenerator::CodeGenerationData : public DeletableArenaObject<kArenaAllocCodeGenerator> {
- public:
- static std::unique_ptr<CodeGenerationData> Create(ArenaStack* arena_stack,
- InstructionSet instruction_set) {
- ScopedArenaAllocator allocator(arena_stack);
- void* memory = allocator.Alloc<CodeGenerationData>(kArenaAllocCodeGenerator);
- return std::unique_ptr<CodeGenerationData>(
- ::new (memory) CodeGenerationData(std::move(allocator), instruction_set));
- }
-
- ScopedArenaAllocator* GetScopedAllocator() {
- return &allocator_;
- }
-
- void AddSlowPath(SlowPathCode* slow_path) {
- slow_paths_.emplace_back(std::unique_ptr<SlowPathCode>(slow_path));
- }
-
- ArrayRef<const std::unique_ptr<SlowPathCode>> GetSlowPaths() const {
- return ArrayRef<const std::unique_ptr<SlowPathCode>>(slow_paths_);
- }
-
- StackMapStream* GetStackMapStream() { return &stack_map_stream_; }
-
- void ReserveJitStringRoot(StringReference string_reference, Handle<mirror::String> string) {
- jit_string_roots_.Overwrite(string_reference,
- reinterpret_cast64<uint64_t>(string.GetReference()));
- }
-
- uint64_t GetJitStringRootIndex(StringReference string_reference) const {
- return jit_string_roots_.Get(string_reference);
- }
-
- size_t GetNumberOfJitStringRoots() const {
- return jit_string_roots_.size();
- }
-
- void ReserveJitClassRoot(TypeReference type_reference, Handle<mirror::Class> klass) {
- jit_class_roots_.Overwrite(type_reference, reinterpret_cast64<uint64_t>(klass.GetReference()));
- }
-
- uint64_t GetJitClassRootIndex(TypeReference type_reference) const {
- return jit_class_roots_.Get(type_reference);
- }
-
- size_t GetNumberOfJitClassRoots() const {
- return jit_class_roots_.size();
- }
-
- size_t GetNumberOfJitRoots() const {
- return GetNumberOfJitStringRoots() + GetNumberOfJitClassRoots();
- }
-
- void EmitJitRoots(/*out*/std::vector<Handle<mirror::Object>>* roots)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
- private:
- CodeGenerationData(ScopedArenaAllocator&& allocator, InstructionSet instruction_set)
- : allocator_(std::move(allocator)),
- stack_map_stream_(&allocator_, instruction_set),
- slow_paths_(allocator_.Adapter(kArenaAllocCodeGenerator)),
- jit_string_roots_(StringReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)),
- jit_class_roots_(TypeReferenceValueComparator(),
- allocator_.Adapter(kArenaAllocCodeGenerator)) {
- slow_paths_.reserve(kDefaultSlowPathsCapacity);
- }
-
- static constexpr size_t kDefaultSlowPathsCapacity = 8;
-
- ScopedArenaAllocator allocator_;
- StackMapStream stack_map_stream_;
- ScopedArenaVector<std::unique_ptr<SlowPathCode>> slow_paths_;
-
- // Maps a StringReference (dex_file, string_index) to the index in the literal table.
- // Entries are intially added with a pointer in the handle zone, and `EmitJitRoots`
- // will compute all the indices.
- ScopedArenaSafeMap<StringReference, uint64_t, StringReferenceValueComparator> jit_string_roots_;
-
- // Maps a ClassReference (dex_file, type_index) to the index in the literal table.
- // Entries are intially added with a pointer in the handle zone, and `EmitJitRoots`
- // will compute all the indices.
- ScopedArenaSafeMap<TypeReference, uint64_t, TypeReferenceValueComparator> jit_class_roots_;
-};
-
-void CodeGenerator::CodeGenerationData::EmitJitRoots(
- /*out*/std::vector<Handle<mirror::Object>>* roots) {
- DCHECK(roots->empty());
- roots->reserve(GetNumberOfJitRoots());
- ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
- size_t index = 0;
- for (auto& entry : jit_string_roots_) {
- // Update the `roots` with the string, and replace the address temporarily
- // stored to the index in the table.
- uint64_t address = entry.second;
- roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
- DCHECK(roots->back() != nullptr);
- DCHECK(roots->back()->IsString());
- entry.second = index;
- // Ensure the string is strongly interned. This is a requirement on how the JIT
- // handles strings. b/32995596
- class_linker->GetInternTable()->InternStrong(roots->back()->AsString());
- ++index;
- }
- for (auto& entry : jit_class_roots_) {
- // Update the `roots` with the class, and replace the address temporarily
- // stored to the index in the table.
- uint64_t address = entry.second;
- roots->emplace_back(reinterpret_cast<StackReference<mirror::Object>*>(address));
- DCHECK(roots->back() != nullptr);
- DCHECK(roots->back()->IsClass());
- entry.second = index;
- ++index;
- }
-}
-
ScopedArenaAllocator* CodeGenerator::GetScopedAllocator() {
DCHECK(code_generation_data_ != nullptr);
return code_generation_data_->GetScopedAllocator();
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 576f363..5f4f377 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -77,6 +77,7 @@
enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
class Assembler;
+class CodeGenerationData;
class CodeGenerator;
class CompilerOptions;
class StackMapStream;
@@ -852,8 +853,6 @@
DisassemblyInformation* disasm_info_;
private:
- class CodeGenerationData;
-
void InitializeCodeGenerationData();
size_t GetStackOffsetOfSavedRegister(size_t index);
void GenerateSlowPaths();