diff options
Diffstat (limited to 'compiler')
-rw-r--r-- | compiler/driver/compiler_options.h | 6
-rw-r--r-- | compiler/optimizing/code_generator.cc | 1
-rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc | 6
-rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 64
4 files changed, 77 insertions, 0 deletions
diff --git a/compiler/driver/compiler_options.h b/compiler/driver/compiler_options.h index 36ecf88199..a3957ce232 100644 --- a/compiler/driver/compiler_options.h +++ b/compiler/driver/compiler_options.h @@ -101,7 +101,13 @@ class CompilerOptions final { } bool IsJniCompilationEnabled() const { +#ifdef ART_USE_RESTRICTED_MODE + // TODO(Simulator): Support JNICompiler. + // Without the JNI compiler, GenericJNITrampoline will be used for JNI calls. + return false; +#else return CompilerFilter::IsJniCompilationEnabled(compiler_filter_); +#endif } bool IsVerificationEnabled() const { diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc index e84cfcbe80..5c2e4dbc51 100644 --- a/compiler/optimizing/code_generator.cc +++ b/compiler/optimizing/code_generator.cc @@ -1569,6 +1569,7 @@ bool CodeGenerator::CanMoveNullCheckToUser(HNullCheck* null_check) { void CodeGenerator::MaybeRecordImplicitNullCheck(HInstruction* instr) { HNullCheck* null_check = instr->GetImplicitNullCheck(); if (null_check != nullptr) { + DCHECK(compiler_options_.GetImplicitNullChecks()); RecordPcInfo(null_check); } } diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index 31e617baec..98aa5600b4 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -200,12 +200,18 @@ class InvokePolymorphicSlowPathARM64 : public SlowPathCodeARM64 { #undef __ bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) { +#ifdef ART_USE_RESTRICTED_MODE + // TODO(Simulator): support intrinsics. 
+ USE(invoke); + return false; +#else Dispatch(invoke); LocationSummary* res = invoke->GetLocations(); if (res == nullptr) { return false; } return res->Intrinsified(); +#endif // ART_USE_RESTRICTED_MODE } #define __ masm-> diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index ef84827653..970771424b 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -759,6 +759,51 @@ CompiledMethod* OptimizingCompiler::Emit(ArenaAllocator* allocator, return compiled_method; } +#ifdef ART_USE_RESTRICTED_MODE + +// This class acts as a filter and enables gradual enablement of ART Simulator work - we +// compile (and hence simulate) only limited types of methods. +class CompilationFilterForRestrictedMode : public HGraphDelegateVisitor { + public: + explicit CompilationFilterForRestrictedMode(HGraph* graph) + : HGraphDelegateVisitor(graph), + has_unsupported_instructions_(false) {} + + // Returns true if the graph contains instructions which are not currently supported in + // the restricted mode. + bool GraphRejected() const { return has_unsupported_instructions_; } + + private: + void VisitInstruction(HInstruction*) override { + // Currently we don't support compiling methods unless they were annotated with $compile$. + RejectGraph(); + } + void RejectGraph() { + has_unsupported_instructions_ = true; + } + + bool has_unsupported_instructions_; +}; + +// Returns whether an ArtMethod, specified by a name, should be compiled. Used in restricted +// mode. +// +// In restricted mode, the simulator will execute only those methods which are compiled; thus +// this is going to be an effective filter for methods to be simulated. +// +// TODO(Simulator): compile and simulate all the methods as in regular host mode. 
+bool ShouldMethodBeCompiled(HGraph* graph, const std::string& method_name) { + if (method_name.find("$compile$") != std::string::npos) { + return true; + } + + CompilationFilterForRestrictedMode filter_visitor(graph); + filter_visitor.VisitReversePostOrder(); + + return !filter_visitor.GraphRejected(); +} +#endif // ART_USE_RESTRICTED_MODE + CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, ArenaStack* arena_stack, const DexCompilationUnit& dex_compilation_unit, @@ -958,6 +1003,17 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, return nullptr; } +#ifdef ART_USE_RESTRICTED_MODE + // Check whether the method should be compiled according to the compilation filter. Note: this + // relies on a LocationSummary being available for each instruction so should take place after + // register allocation does liveness analysis. + // TODO(Simulator): support and compile all methods. + std::string method_name = dex_file.PrettyMethod(method_idx); + if (!ShouldMethodBeCompiled(graph, method_name)) { + return nullptr; + } +#endif // ART_USE_RESTRICTED_MODE + codegen->Compile(); pass_observer.DumpDisassembly(); @@ -977,6 +1033,11 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic( const DexFile& dex_file = *dex_compilation_unit.GetDexFile(); uint32_t method_idx = dex_compilation_unit.GetDexMethodIndex(); + // TODO(Simulator): Reenable compilation of intrinsics. +#ifdef ART_USE_RESTRICTED_MODE + return nullptr; +#endif // ART_USE_RESTRICTED_MODE + // Always use the Thumb-2 assembler: some runtime functionality // (like implicit stack overflow checks) assume Thumb-2. DCHECK_NE(instruction_set, InstructionSet::kArm); @@ -1149,6 +1210,8 @@ CompiledMethod* OptimizingCompiler::Compile(const dex::CodeItem* code_item, } } + // TODO(Simulator): Check for $opt$ in method name and that such method is compiled. 
+#ifndef ART_USE_RESTRICTED_MODE if (kIsDebugBuild && compiler_options.CompileArtTest() && IsInstructionSetSupported(compiler_options.GetInstructionSet())) { @@ -1160,6 +1223,7 @@ CompiledMethod* OptimizingCompiler::Compile(const dex::CodeItem* code_item, bool shouldCompile = method_name.find("$opt$") != std::string::npos; DCHECK_IMPLIES(compiled_method == nullptr, !shouldCompile) << "Didn't compile " << method_name; } +#endif // #ifndef ART_USE_RESTRICTED_MODE return compiled_method; }