Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc          12
-rw-r--r--  compiler/optimizing/code_generator.h           17
-rw-r--r--  compiler/optimizing/code_generator_arm.cc      38
-rw-r--r--  compiler/optimizing/code_generator_arm.h       12
-rw-r--r--  compiler/optimizing/code_generator_x86.cc      38
-rw-r--r--  compiler/optimizing/code_generator_x86.h       12
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc   38
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h    12
8 files changed, 122 insertions, 57 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 29dbd8b33d..408e13e36d 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -36,7 +36,7 @@ void CodeGenerator::CompileBaseline(CodeAllocator* allocator, bool is_leaf) {
   const GrowableArray<HBasicBlock*>& blocks = GetGraph()->GetBlocks();
   DCHECK(blocks.Get(0) == GetGraph()->GetEntryBlock());
   DCHECK(GoesToNextBlock(GetGraph()->GetEntryBlock(), blocks.Get(1)));
-  block_labels_.SetSize(blocks.Size());
+  Initialize();

   DCHECK_EQ(frame_size_, kUninitializedFrameSize);
   if (!is_leaf) {
@@ -54,7 +54,7 @@ void CodeGenerator::CompileBaseline(CodeAllocator* allocator, bool is_leaf) {
   HGraphVisitor* instruction_visitor = GetInstructionVisitor();
   for (size_t i = 0, e = blocks.Size(); i < e; ++i) {
     HBasicBlock* block = blocks.Get(i);
-    Bind(GetLabelOf(block));
+    Bind(block);
     for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
       HInstruction* current = it.Current();
       current->Accept(location_builder);
@@ -76,13 +76,13 @@ void CodeGenerator::CompileOptimized(CodeAllocator* allocator) {
   const GrowableArray<HBasicBlock*>& blocks = GetGraph()->GetBlocks();
   DCHECK(blocks.Get(0) == GetGraph()->GetEntryBlock());
   DCHECK(GoesToNextBlock(GetGraph()->GetEntryBlock(), blocks.Get(1)));
-  block_labels_.SetSize(blocks.Size());
+  Initialize();

   GenerateFrameEntry();
   HGraphVisitor* instruction_visitor = GetInstructionVisitor();
   for (size_t i = 0, e = blocks.Size(); i < e; ++i) {
     HBasicBlock* block = blocks.Get(i);
-    Bind(GetLabelOf(block));
+    Bind(block);
     for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
       HInstruction* current = it.Current();
       current->Accept(instruction_visitor);
@@ -273,10 +273,6 @@ bool CodeGenerator::GoesToNextBlock(HBasicBlock* current, HBasicBlock* next) con
   return current->GetBlockId() + 1 == next->GetBlockId();
 }

-Label* CodeGenerator::GetLabelOf(HBasicBlock* block) const {
-  return block_labels_.GetRawStorage() + block->GetBlockId();
-}
-
 CodeGenerator* CodeGenerator::Create(ArenaAllocator* allocator,
                                      HGraph* graph,
                                      InstructionSet instruction_set) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 4eba791723..7aaf99108f 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -24,13 +24,13 @@
 #include "memory_region.h"
 #include "nodes.h"
 #include "stack_map_stream.h"
-#include "utils/assembler.h"

 namespace art {

 static size_t constexpr kVRegSize = 4;
 static size_t constexpr kUninitializedFrameSize = 0;

+class Assembler;
 class CodeGenerator;
 class DexCompilationUnit;
 class SrcMap;
@@ -53,18 +53,12 @@ struct PcInfo {

 class SlowPathCode : public ArenaObject {
  public:
-  SlowPathCode() : entry_label_(), exit_label_() {}
+  SlowPathCode() {}
   virtual ~SlowPathCode() {}

-  Label* GetEntryLabel() { return &entry_label_; }
-  Label* GetExitLabel() { return &exit_label_; }
-
   virtual void EmitNativeCode(CodeGenerator* codegen) = 0;

  private:
-  Label entry_label_;
-  Label exit_label_;
-
   DISALLOW_COPY_AND_ASSIGN(SlowPathCode);
 };

@@ -80,7 +74,6 @@ class CodeGenerator : public ArenaObject {

   HGraph* GetGraph() const { return graph_; }

-  Label* GetLabelOf(HBasicBlock* block) const;
   bool GoesToNextBlock(HBasicBlock* current, HBasicBlock* next) const;

   size_t GetStackSlotOfParameter(HParameterValue* parameter) const {
@@ -90,9 +83,10 @@ class CodeGenerator : public ArenaObject {
         + parameter->GetIndex() * kVRegSize;
   }

+  virtual void Initialize() = 0;
   virtual void GenerateFrameEntry() = 0;
   virtual void GenerateFrameExit() = 0;
-  virtual void Bind(Label* label) = 0;
+  virtual void Bind(HBasicBlock* block) = 0;
   virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) = 0;
   virtual HGraphVisitor* GetLocationBuilder() = 0;
   virtual HGraphVisitor* GetInstructionVisitor() = 0;
@@ -167,7 +161,6 @@ class CodeGenerator : public ArenaObject {
         number_of_fpu_registers_(number_of_fpu_registers),
         number_of_register_pairs_(number_of_register_pairs),
         graph_(graph),
-        block_labels_(graph->GetArena(), 0),
         pc_infos_(graph->GetArena(), 32),
         slow_paths_(graph->GetArena(), 8),
         is_leaf_(true),
@@ -205,8 +198,6 @@ class CodeGenerator : public ArenaObject {

   HGraph* const graph_;

-  // Labels for each block that will be compiled.
-  GrowableArray<Label> block_labels_;
   GrowableArray<PcInfo> pc_infos_;
   GrowableArray<SlowPathCode*> slow_paths_;

diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 7a9e9c4ce9..cdee845343 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -60,7 +60,21 @@ class InvokeRuntimeCallingConvention : public CallingConvention<Register, DRegis

 #define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->

-class NullCheckSlowPathARM : public SlowPathCode {
+class SlowPathCodeARM : public SlowPathCode {
+ public:
+  SlowPathCodeARM() : entry_label_(), exit_label_() {}
+
+  Label* GetEntryLabel() { return &entry_label_; }
+  Label* GetExitLabel() { return &exit_label_; }
+
+ private:
+  Label entry_label_;
+  Label exit_label_;
+
+  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
+};
+
+class NullCheckSlowPathARM : public SlowPathCodeARM {
  public:
   explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

@@ -77,7 +91,7 @@ class NullCheckSlowPathARM : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
 };

-class StackOverflowCheckSlowPathARM : public SlowPathCode {
+class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
  public:
   StackOverflowCheckSlowPathARM() {}

@@ -91,12 +105,13 @@ class StackOverflowCheckSlowPathARM : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
 };

-class SuspendCheckSlowPathARM : public SlowPathCode {
+class SuspendCheckSlowPathARM : public SlowPathCodeARM {
  public:
   explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
       : instruction_(instruction), successor_(successor) {}

   virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
     __ Bind(GetEntryLabel());
     codegen->SaveLiveRegisters(instruction_->GetLocations());
     int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pTestSuspend).Int32Value();
@@ -107,7 +122,7 @@ class SuspendCheckSlowPathARM : public SlowPathCode {
     if (successor_ == nullptr) {
       __ b(GetReturnLabel());
     } else {
-      __ b(codegen->GetLabelOf(successor_));
+      __ b(arm_codegen->GetLabelOf(successor_));
     }
   }

@@ -127,7 +142,7 @@ class SuspendCheckSlowPathARM : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
 };

-class BoundsCheckSlowPathARM : public SlowPathCode {
+class BoundsCheckSlowPathARM : public SlowPathCodeARM {
  public:
   BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                          Location index_location,
@@ -137,7 +152,7 @@ class BoundsCheckSlowPathARM : public SlowPathCode {
         length_location_(length_location) {}

   virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    CodeGeneratorARM* arm_codegen = reinterpret_cast<CodeGeneratorARM*>(codegen);
+    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
     __ Bind(GetEntryLabel());
     InvokeRuntimeCallingConvention calling_convention;
     arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
@@ -205,6 +220,7 @@ void CodeGeneratorARM::RestoreCoreRegister(Location stack_location, uint32_t reg

 CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
     : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfDRegisters, kNumberOfRegisterPairs),
+      block_labels_(graph->GetArena(), 0),
       location_builder_(graph, this),
       instruction_visitor_(graph, this),
       move_resolver_(graph->GetArena(), this),
@@ -313,7 +329,7 @@ void CodeGeneratorARM::GenerateFrameEntry() {
   bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
   if (!skip_overflow_check) {
     if (kExplicitStackOverflowCheck) {
-      SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
+      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
       AddSlowPath(slow_path);

       __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
@@ -339,8 +355,8 @@ void CodeGeneratorARM::GenerateFrameExit() {
   __ PopList(1 << PC | 1 << R6 | 1 << R7);
 }

-void CodeGeneratorARM::Bind(Label* label) {
-  __ Bind(label);
+void CodeGeneratorARM::Bind(HBasicBlock* block) {
+  __ Bind(GetLabelOf(block));
 }

 Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
@@ -1365,7 +1381,7 @@ void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
 }

 void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
-  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
+  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
   codegen_->AddSlowPath(slow_path);

   LocationSummary* locations = instruction->GetLocations();
@@ -1611,7 +1627,7 @@ void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {

 void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
   LocationSummary* locations = instruction->GetLocations();
-  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
+  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
       instruction, locations->InAt(0), locations->InAt(1));
   codegen_->AddSlowPath(slow_path);

diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 874db0fd54..7c063f1728 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -140,7 +140,7 @@ class CodeGeneratorARM : public CodeGenerator {

   virtual void GenerateFrameEntry() OVERRIDE;
   virtual void GenerateFrameExit() OVERRIDE;
-  virtual void Bind(Label* label) OVERRIDE;
+  virtual void Bind(HBasicBlock* block) OVERRIDE;
   virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;
   virtual void SaveCoreRegister(Location stack_location, uint32_t reg_id) OVERRIDE;
   virtual void RestoreCoreRegister(Location stack_location, uint32_t reg_id) OVERRIDE;
@@ -187,7 +187,17 @@ class CodeGeneratorARM : public CodeGenerator {
   // Emit a write barrier.
   void MarkGCCard(Register temp, Register card, Register object, Register value);

+  Label* GetLabelOf(HBasicBlock* block) const {
+    return block_labels_.GetRawStorage() + block->GetBlockId();
+  }
+
+  virtual void Initialize() OVERRIDE {
+    block_labels_.SetSize(GetGraph()->GetBlocks().Size());
+  }
+
  private:
+  // Labels for each block that will be compiled.
+  GrowableArray<Label> block_labels_;
   LocationsBuilderARM location_builder_;
   InstructionCodeGeneratorARM instruction_visitor_;
   ParallelMoveResolverARM move_resolver_;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 5fc389569b..98d3ad4185 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -56,7 +56,21 @@ class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmReg

 #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->

-class NullCheckSlowPathX86 : public SlowPathCode {
+class SlowPathCodeX86 : public SlowPathCode {
+ public:
+  SlowPathCodeX86() : entry_label_(), exit_label_() {}
+
+  Label* GetEntryLabel() { return &entry_label_; }
+  Label* GetExitLabel() { return &exit_label_; }
+
+ private:
+  Label entry_label_;
+  Label exit_label_;
+
+  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeX86);
+};
+
+class NullCheckSlowPathX86 : public SlowPathCodeX86 {
  public:
   explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}

@@ -71,7 +85,7 @@ class NullCheckSlowPathX86 : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
 };

-class StackOverflowCheckSlowPathX86 : public SlowPathCode {
+class StackOverflowCheckSlowPathX86 : public SlowPathCodeX86 {
  public:
   StackOverflowCheckSlowPathX86() {}

@@ -86,7 +100,7 @@ class StackOverflowCheckSlowPathX86 : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
 };

-class BoundsCheckSlowPathX86 : public SlowPathCode {
+class BoundsCheckSlowPathX86 : public SlowPathCodeX86 {
  public:
   BoundsCheckSlowPathX86(HBoundsCheck* instruction,
                          Location index_location,
@@ -94,7 +108,7 @@ class BoundsCheckSlowPathX86 : public SlowPathCode {
       : instruction_(instruction), index_location_(index_location), length_location_(length_location) {}

   virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    CodeGeneratorX86* x86_codegen = reinterpret_cast<CodeGeneratorX86*>(codegen);
+    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
     __ Bind(GetEntryLabel());
     InvokeRuntimeCallingConvention calling_convention;
     x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
@@ -111,12 +125,13 @@ class BoundsCheckSlowPathX86 : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
 };

-class SuspendCheckSlowPathX86 : public SlowPathCode {
+class SuspendCheckSlowPathX86 : public SlowPathCodeX86 {
  public:
   explicit SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
       : instruction_(instruction), successor_(successor) {}

   virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
     __ Bind(GetEntryLabel());
     codegen->SaveLiveRegisters(instruction_->GetLocations());
     __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
@@ -125,7 +140,7 @@ class SuspendCheckSlowPathX86 : public SlowPathCode {
     if (successor_ == nullptr) {
       __ jmp(GetReturnLabel());
     } else {
-      __ jmp(codegen->GetLabelOf(successor_));
+      __ jmp(x86_codegen->GetLabelOf(successor_));
     }
   }

@@ -177,6 +192,7 @@ void CodeGeneratorX86::RestoreCoreRegister(Location stack_location, uint32_t reg

 CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
     : CodeGenerator(graph, kNumberOfCpuRegisters, kNumberOfXmmRegisters, kNumberOfRegisterPairs),
+      block_labels_(graph->GetArena(), 0),
       location_builder_(graph, this),
       instruction_visitor_(graph, this),
       move_resolver_(graph->GetArena(), this) {}
@@ -276,7 +292,7 @@ void CodeGeneratorX86::GenerateFrameEntry() {
   __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

   if (!skip_overflow_check && kExplicitStackOverflowCheck) {
-    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
+    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
     AddSlowPath(slow_path);

     __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
@@ -290,8 +306,8 @@ void CodeGeneratorX86::GenerateFrameExit() {
   __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
 }

-void CodeGeneratorX86::Bind(Label* label) {
-  __ Bind(label);
+void CodeGeneratorX86::Bind(HBasicBlock* block) {
+  __ Bind(GetLabelOf(block));
 }

 void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
@@ -1375,7 +1391,7 @@ void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
 }

 void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
-  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
+  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
   codegen_->AddSlowPath(slow_path);

   LocationSummary* locations = instruction->GetLocations();
@@ -1677,7 +1693,7 @@ void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {

 void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
   LocationSummary* locations = instruction->GetLocations();
-  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
+  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
       instruction, locations->InAt(0), locations->InAt(1));
   codegen_->AddSlowPath(slow_path);

diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index a1a72a2bd7..aa5fee00e0 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -142,7 +142,7 @@ class CodeGeneratorX86 : public CodeGenerator {

   virtual void GenerateFrameEntry() OVERRIDE;
   virtual void GenerateFrameExit() OVERRIDE;
-  virtual void Bind(Label* label) OVERRIDE;
+  virtual void Bind(HBasicBlock* block) OVERRIDE;
   virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;
   virtual void SaveCoreRegister(Location stack_location, uint32_t reg_id) OVERRIDE;
   virtual void RestoreCoreRegister(Location stack_location, uint32_t reg_id) OVERRIDE;
@@ -189,7 +189,17 @@ class CodeGeneratorX86 : public CodeGenerator {
   // Emit a write barrier.
   void MarkGCCard(Register temp, Register card, Register object, Register value);

+  Label* GetLabelOf(HBasicBlock* block) const {
+    return block_labels_.GetRawStorage() + block->GetBlockId();
+  }
+
+  virtual void Initialize() OVERRIDE {
+    block_labels_.SetSize(GetGraph()->GetBlocks().Size());
+  }
+
  private:
+  // Labels for each block that will be compiled.
+  GrowableArray<Label> block_labels_;
   LocationsBuilderX86 location_builder_;
   InstructionCodeGeneratorX86 instruction_visitor_;
   ParallelMoveResolverX86 move_resolver_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 5781e7e07a..059ff3fa79 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -60,7 +60,21 @@ class InvokeRuntimeCallingConvention : public CallingConvention<Register, FloatR

 #define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->

-class NullCheckSlowPathX86_64 : public SlowPathCode {
+class SlowPathCodeX86_64 : public SlowPathCode {
+ public:
+  SlowPathCodeX86_64() : entry_label_(), exit_label_() {}
+
+  Label* GetEntryLabel() { return &entry_label_; }
+  Label* GetExitLabel() { return &exit_label_; }
+
+ private:
+  Label entry_label_;
+  Label exit_label_;
+
+  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeX86_64);
+};
+
+class NullCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
  public:
   explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : instruction_(instruction) {}

@@ -76,7 +90,7 @@ class NullCheckSlowPathX86_64 : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
 };

-class StackOverflowCheckSlowPathX86_64 : public SlowPathCode {
+class StackOverflowCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
  public:
   StackOverflowCheckSlowPathX86_64() {}

@@ -92,12 +106,13 @@ class StackOverflowCheckSlowPathX86_64 : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86_64);
 };

-class SuspendCheckSlowPathX86_64 : public SlowPathCode {
+class SuspendCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
  public:
   explicit SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
       : instruction_(instruction), successor_(successor) {}

   virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
     __ Bind(GetEntryLabel());
     codegen->SaveLiveRegisters(instruction_->GetLocations());
     __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pTestSuspend), true));
@@ -106,7 +121,7 @@ class SuspendCheckSlowPathX86_64 : public SlowPathCode {
     if (successor_ == nullptr) {
       __ jmp(GetReturnLabel());
     } else {
-      __ jmp(codegen->GetLabelOf(successor_));
+      __ jmp(x64_codegen->GetLabelOf(successor_));
     }
   }

@@ -123,7 +138,7 @@ class SuspendCheckSlowPathX86_64 : public SlowPathCode {
   DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
 };

-class BoundsCheckSlowPathX86_64 : public SlowPathCode {
+class BoundsCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
  public:
   BoundsCheckSlowPathX86_64(HBoundsCheck* instruction,
                             Location index_location,
@@ -133,7 +148,7 @@ class BoundsCheckSlowPathX86_64 : public SlowPathCode {
         length_location_(length_location) {}

   virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
-    CodeGeneratorX86_64* x64_codegen = reinterpret_cast<CodeGeneratorX86_64*>(codegen);
+    CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
     __ Bind(GetEntryLabel());
     InvokeRuntimeCallingConvention calling_convention;
     x64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
@@ -186,6 +201,7 @@ void CodeGeneratorX86_64::RestoreCoreRegister(Location stack_location, uint32_t

 CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
       : CodeGenerator(graph, kNumberOfCpuRegisters, kNumberOfFloatRegisters, 0),
+        block_labels_(graph->GetArena(), 0),
         location_builder_(graph, this),
         instruction_visitor_(graph, this),
         move_resolver_(graph->GetArena(), this) {}
@@ -266,7 +282,7 @@ void CodeGeneratorX86_64::GenerateFrameEntry() {
           Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));

   if (!skip_overflow_check && kExplicitStackOverflowCheck) {
-    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86_64();
+    SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86_64();
     AddSlowPath(slow_path);

     __ gs()->cmpq(CpuRegister(RSP),
@@ -282,8 +298,8 @@ void CodeGeneratorX86_64::GenerateFrameExit() {
           Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
 }

-void CodeGeneratorX86_64::Bind(Label* label) {
-  __ Bind(label);
+void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
+  __ Bind(GetLabelOf(block));
 }

 void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
@@ -1254,7 +1270,7 @@ void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
 }

 void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
-  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
+  SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
   codegen_->AddSlowPath(slow_path);

   LocationSummary* locations = instruction->GetLocations();
@@ -1526,7 +1542,7 @@ void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {

 void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
   LocationSummary* locations = instruction->GetLocations();
-  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(
+  SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(
       instruction, locations->InAt(0), locations->InAt(1));
   codegen_->AddSlowPath(slow_path);

diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 288f3f61f9..5ac0189b55 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -144,7 +144,7 @@ class CodeGeneratorX86_64 : public CodeGenerator {

   virtual void GenerateFrameEntry() OVERRIDE;
   virtual void GenerateFrameExit() OVERRIDE;
-  virtual void Bind(Label* label) OVERRIDE;
+  virtual void Bind(HBasicBlock* block) OVERRIDE;
   virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;
   virtual void SaveCoreRegister(Location stack_location, uint32_t reg_id) OVERRIDE;
   virtual void RestoreCoreRegister(Location stack_location, uint32_t reg_id) OVERRIDE;
@@ -188,7 +188,17 @@ class CodeGeneratorX86_64 : public CodeGenerator {
   // Helper method to move a value between two locations.
   void Move(Location destination, Location source);

+  Label* GetLabelOf(HBasicBlock* block) const {
+    return block_labels_.GetRawStorage() + block->GetBlockId();
+  }
+
+  virtual void Initialize() OVERRIDE {
+    block_labels_.SetSize(GetGraph()->GetBlocks().Size());
+  }
+
  private:
+  // Labels for each block that will be compiled.
+  GrowableArray<Label> block_labels_;
   LocationsBuilderX86_64 location_builder_;
   InstructionCodeGeneratorX86_64 instruction_visitor_;
   ParallelMoveResolverX86_64 move_resolver_;