-rw-r--r--  build/Android.common_build.mk                               5
-rw-r--r--  build/art.go                                               11
-rw-r--r--  compiler/driver/compiler_options.h                          6
-rw-r--r--  compiler/optimizing/code_generator.cc                       1
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc                     6
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc                 64
-rw-r--r--  dex2oat/dex2oat.cc                                          6
-rw-r--r--  runtime/arch/arm/native_entrypoints_arm.S                   5
-rw-r--r--  runtime/arch/arm64/entrypoints_init_arm64.cc               13
-rw-r--r--  runtime/arch/arm64/native_entrypoints_arm64.S               5
-rw-r--r--  runtime/arch/riscv64/native_entrypoints_riscv64.S           5
-rw-r--r--  runtime/arch/x86/native_entrypoints_x86.S                   5
-rw-r--r--  runtime/arch/x86_64/native_entrypoints_x86_64.S             6
-rw-r--r--  runtime/art_method.h                                        9
-rw-r--r--  runtime/entrypoints/quick/quick_trampoline_entrypoints.cc  11
-rw-r--r--  runtime/gc/collector_type.h                                 2
-rw-r--r--  runtime/interpreter/mterp/nterp.cc                          7
-rw-r--r--  runtime/runtime.cc                                         73
-rw-r--r--  runtime/runtime.h                                           2
-rw-r--r--  runtime/runtime_globals.h                                   5
-rw-r--r--  test/knownfailures.json                                    14
-rwxr-xr-x  test/run-test                                               5
22 files changed, 227 insertions, 39 deletions
diff --git a/build/Android.common_build.mk b/build/Android.common_build.mk
index f5a95fa0cf..ad551a13e9 100644
--- a/build/Android.common_build.mk
+++ b/build/Android.common_build.mk
@@ -46,8 +46,13 @@ ifeq ($(ART_BUILD_HOST_DEBUG),false)
$(info Disabling ART_BUILD_HOST_DEBUG)
endif
+ifeq ($(ART_USE_RESTRICTED_MODE),true)
+# TODO(Simulator): Support read barriers.
+ART_USE_READ_BARRIER := false
+else
# Enable the read barrier by default.
ART_USE_READ_BARRIER ?= true
+endif
ART_CPP_EXTENSION := .cc
diff --git a/build/art.go b/build/art.go
index 3aaa3eee21..c4df20d3d7 100644
--- a/build/art.go
+++ b/build/art.go
@@ -44,6 +44,14 @@ func globalFlags(ctx android.LoadHookContext) ([]string, []string) {
tlab = true
}
+ if ctx.Config().IsEnvTrue("ART_USE_RESTRICTED_MODE") {
+ cflags = append(cflags, "-DART_USE_RESTRICTED_MODE=1")
+ asflags = append(asflags, "-DART_USE_RESTRICTED_MODE=1")
+
+ // TODO(Simulator): Support other GC types.
+ gcType = "MS"
+ }
+
cflags = append(cflags, "-DART_DEFAULT_GC_TYPE_IS_"+gcType)
if ctx.Config().IsEnvTrue("ART_HEAP_POISONING") {
@@ -56,7 +64,8 @@ func globalFlags(ctx android.LoadHookContext) ([]string, []string) {
// TODO: deprecate and then eventually remove ART_USE_GENERATIONAL_CC in favor of
// ART_USE_GENERATIONAL_GC
- if !ctx.Config().IsEnvFalse("ART_USE_READ_BARRIER") && ctx.Config().ArtUseReadBarrier() {
+ if !ctx.Config().IsEnvFalse("ART_USE_READ_BARRIER") && ctx.Config().ArtUseReadBarrier() &&
+ !ctx.Config().IsEnvTrue("ART_USE_RESTRICTED_MODE") {
// Used to change the read barrier type. Valid values are BAKER, TABLELOOKUP.
// The default is BAKER.
barrierType := ctx.Config().GetenvWithDefault("ART_READ_BARRIER_TYPE", "BAKER")
diff --git a/compiler/driver/compiler_options.h b/compiler/driver/compiler_options.h
index 36ecf88199..a3957ce232 100644
--- a/compiler/driver/compiler_options.h
+++ b/compiler/driver/compiler_options.h
@@ -101,7 +101,13 @@ class CompilerOptions final {
}
bool IsJniCompilationEnabled() const {
+#ifdef ART_USE_RESTRICTED_MODE
+ // TODO(Simulator): Support JNICompiler.
+ // Without the JNI compiler, GenericJNITrampoline will be used for JNI calls.
+ return false;
+#else
return CompilerFilter::IsJniCompilationEnabled(compiler_filter_);
+#endif
}
bool IsVerificationEnabled() const {
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index e84cfcbe80..5c2e4dbc51 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1569,6 +1569,7 @@ bool CodeGenerator::CanMoveNullCheckToUser(HNullCheck* null_check) {
void CodeGenerator::MaybeRecordImplicitNullCheck(HInstruction* instr) {
HNullCheck* null_check = instr->GetImplicitNullCheck();
if (null_check != nullptr) {
+ DCHECK(compiler_options_.GetImplicitNullChecks());
RecordPcInfo(null_check);
}
}
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 31e617baec..98aa5600b4 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -200,12 +200,18 @@ class InvokePolymorphicSlowPathARM64 : public SlowPathCodeARM64 {
#undef __
bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
+#ifdef ART_USE_RESTRICTED_MODE
+ // TODO(Simulator): support intrinsics.
+ USE(invoke);
+ return false;
+#else
Dispatch(invoke);
LocationSummary* res = invoke->GetLocations();
if (res == nullptr) {
return false;
}
return res->Intrinsified();
+#endif // ART_USE_RESTRICTED_MODE
}
#define __ masm->
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index ef84827653..970771424b 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -759,6 +759,51 @@ CompiledMethod* OptimizingCompiler::Emit(ArenaAllocator* allocator,
return compiled_method;
}
+#ifdef ART_USE_RESTRICTED_MODE
+
+// This class acts as a filter for the gradual rollout of ART Simulator support: we compile
+// (and hence simulate) only limited kinds of methods.
+class CompilationFilterForRestrictedMode : public HGraphDelegateVisitor {
+ public:
+ explicit CompilationFilterForRestrictedMode(HGraph* graph)
+ : HGraphDelegateVisitor(graph),
+ has_unsupported_instructions_(false) {}
+
+ // Returns true if the graph contains instructions which are not currently supported in
+ // the restricted mode.
+ bool GraphRejected() const { return has_unsupported_instructions_; }
+
+ private:
+ void VisitInstruction(HInstruction*) override {
+ // Currently we only support compiling methods whose names contain the $compile$ marker.
+ RejectGraph();
+ }
+ void RejectGraph() {
+ has_unsupported_instructions_ = true;
+ }
+
+ bool has_unsupported_instructions_;
+};
+
+// Returns whether an ArtMethod, identified by name, should be compiled. Used in restricted
+// mode.
+//
+// In restricted mode the simulator executes only methods that are compiled, so this also
+// effectively filters which methods are simulated.
+//
+// TODO(Simulator): compile and simulate all the methods as in regular host mode.
+bool ShouldMethodBeCompiled(HGraph* graph, const std::string& method_name) {
+ if (method_name.find("$compile$") != std::string::npos) {
+ return true;
+ }
+
+ CompilationFilterForRestrictedMode filter_visitor(graph);
+ filter_visitor.VisitReversePostOrder();
+
+ return !filter_visitor.GraphRejected();
+}
+#endif // ART_USE_RESTRICTED_MODE
+
CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
ArenaStack* arena_stack,
const DexCompilationUnit& dex_compilation_unit,
@@ -958,6 +1003,17 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
return nullptr;
}
+#ifdef ART_USE_RESTRICTED_MODE
+ // Check whether the method should be compiled according to the compilation filter. Note: this
+ // relies on a LocationSummary being available for each instruction, so it must run after the
+ // register allocator has performed liveness analysis.
+ // TODO(Simulator): support and compile all methods.
+ std::string method_name = dex_file.PrettyMethod(method_idx);
+ if (!ShouldMethodBeCompiled(graph, method_name)) {
+ return nullptr;
+ }
+#endif // ART_USE_RESTRICTED_MODE
+
codegen->Compile();
pass_observer.DumpDisassembly();
@@ -977,6 +1033,11 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
uint32_t method_idx = dex_compilation_unit.GetDexMethodIndex();
+ // TODO(Simulator): Reenable compilation of intrinsics.
+#ifdef ART_USE_RESTRICTED_MODE
+ return nullptr;
+#endif // ART_USE_RESTRICTED_MODE
+
// Always use the Thumb-2 assembler: some runtime functionality
// (like implicit stack overflow checks) assume Thumb-2.
DCHECK_NE(instruction_set, InstructionSet::kArm);
@@ -1149,6 +1210,8 @@ CompiledMethod* OptimizingCompiler::Compile(const dex::CodeItem* code_item,
}
}
+ // TODO(Simulator): Check for $opt$ in method name and that such method is compiled.
+#ifndef ART_USE_RESTRICTED_MODE
if (kIsDebugBuild &&
compiler_options.CompileArtTest() &&
IsInstructionSetSupported(compiler_options.GetInstructionSet())) {
@@ -1160,6 +1223,7 @@ CompiledMethod* OptimizingCompiler::Compile(const dex::CodeItem* code_item,
bool shouldCompile = method_name.find("$opt$") != std::string::npos;
DCHECK_IMPLIES(compiled_method == nullptr, !shouldCompile) << "Didn't compile " << method_name;
}
+#endif // !ART_USE_RESTRICTED_MODE
return compiled_method;
}
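
The filter above works by exclusion: VisitReversePostOrder() touches every instruction, each visit marks the graph as rejected, and so only the $compile$ marker in the method name can let a method through. A minimal standalone sketch of that accept/reject flow (plain C++; Graph, Instruction, and the visitor are simplified stand-ins, not ART types):

    #include <iostream>
    #include <string>
    #include <vector>

    // Simplified stand-ins for HGraph/HInstruction; not ART types.
    struct Instruction {};
    struct Graph { std::vector<Instruction> instructions; };

    // Mirrors CompilationFilterForRestrictedMode: visiting any instruction rejects the graph.
    class CompilationFilter {
     public:
      void Visit(const Graph& graph) {
        for (const Instruction& instruction : graph.instructions) {
          VisitInstruction(instruction);
        }
      }
      bool GraphRejected() const { return has_unsupported_instructions_; }

     private:
      void VisitInstruction(const Instruction&) { has_unsupported_instructions_ = true; }
      bool has_unsupported_instructions_ = false;
    };

    // Mirrors ShouldMethodBeCompiled: the $compile$ name marker short-circuits the filter.
    bool ShouldMethodBeCompiled(const Graph& graph, const std::string& method_name) {
      if (method_name.find("$compile$") != std::string::npos) {
        return true;
      }
      CompilationFilter filter;
      filter.Visit(graph);
      return !filter.GraphRejected();
    }

    int main() {
      Graph graph{{Instruction{}}};  // a single instruction is enough to reject the graph
      std::cout << ShouldMethodBeCompiled(graph, "int Foo.bar()") << "\n";           // prints 0
      std::cout << ShouldMethodBeCompiled(graph, "int Foo.$compile$bar()") << "\n";  // prints 1
    }

As in the real filter, a graph with no instructions would also pass; since every real method has at least a return instruction, the name marker is the effective gate.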
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index e3cc0ec682..7564bf0b68 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -878,6 +878,12 @@ class Dex2Oat final {
break;
}
+#ifdef ART_USE_RESTRICTED_MODE
+ // TODO(Simulator): support signal handling and implicit checks.
+ compiler_options_->implicit_suspend_checks_ = false;
+ compiler_options_->implicit_null_checks_ = false;
+#endif // ART_USE_RESTRICTED_MODE
+
// Done with usage checks, enable watchdog if requested
if (parser_options->watch_dog_enabled) {
int64_t timeout = parser_options->watch_dog_timeout_in_ms > 0
diff --git a/runtime/arch/arm/native_entrypoints_arm.S b/runtime/arch/arm/native_entrypoints_arm.S
index 1666dc8d4b..1f3aae6392 100644
--- a/runtime/arch/arm/native_entrypoints_arm.S
+++ b/runtime/arch/arm/native_entrypoints_arm.S
@@ -63,7 +63,12 @@ ENTRY art_jni_dlsym_lookup_stub
bic ip, #TAGGED_JNI_SP_MASK // ArtMethod** sp
ldr ip, [ip] // ArtMethod* method
ldr ip, [ip, #ART_METHOD_ACCESS_FLAGS_OFFSET] // uint32_t access_flags
+#ifdef ART_USE_RESTRICTED_MODE
+ // Critical native methods are disabled and treated as normal native methods instead.
+ tst ip, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE)
+#else
tst ip, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
+#endif
bne .Llookup_stub_fast_or_critical_native
blx artFindNativeMethod
b .Llookup_stub_continue
diff --git a/runtime/arch/arm64/entrypoints_init_arm64.cc b/runtime/arch/arm64/entrypoints_init_arm64.cc
index 3292b15503..acc08c4fd7 100644
--- a/runtime/arch/arm64/entrypoints_init_arm64.cc
+++ b/runtime/arch/arm64/entrypoints_init_arm64.cc
@@ -53,13 +53,12 @@ extern "C" mirror::Object* art_quick_read_barrier_mark_reg09(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg10(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg11(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg12(mirror::Object*);
-extern "C" mirror::Object* art_quick_read_barrier_mark_reg12(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg13(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg14(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg15(mirror::Object*);
-extern "C" mirror::Object* art_quick_read_barrier_mark_reg16(mirror::Object*);
+// extern "C" mirror::Object* art_quick_read_barrier_mark_reg16(mirror::Object*); ip0 is blocked
extern "C" mirror::Object* art_quick_read_barrier_mark_reg17(mirror::Object*);
-extern "C" mirror::Object* art_quick_read_barrier_mark_reg18(mirror::Object*);
+// extern "C" mirror::Object* art_quick_read_barrier_mark_reg18(mirror::Object*); x18 is blocked
extern "C" mirror::Object* art_quick_read_barrier_mark_reg19(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg20(mirror::Object*);
extern "C" mirror::Object* art_quick_read_barrier_mark_reg21(mirror::Object*);
@@ -130,8 +129,13 @@ void UpdateReadBarrierEntrypoints(QuickEntryPoints* qpoints, bool is_active) {
qpoints->SetReadBarrierMarkReg28(is_active ? art_quick_read_barrier_mark_reg28 : nullptr);
qpoints->SetReadBarrierMarkReg29(is_active ? art_quick_read_barrier_mark_reg29 : nullptr);
- // Check that array switch cases are at appropriate offsets from the introspection entrypoint.
DCHECK_ALIGNED(art_quick_read_barrier_mark_introspection, 512u);
+
+ // TODO(Simulator): the introspection entrypoints are not currently used in the simulator and
+ // they are not aligned correctly due to the veneer used in CALL_SYMBOL and BRANCH_SYMBOL.
+ // Re-enable these checks when the introspection entrypoints are used and tested.
+#ifndef ART_USE_RESTRICTED_MODE
+ // Check that array switch cases are at appropriate offsets from the introspection entrypoint.
intptr_t array_diff =
reinterpret_cast<intptr_t>(art_quick_read_barrier_mark_introspection_arrays) -
reinterpret_cast<intptr_t>(art_quick_read_barrier_mark_introspection);
@@ -141,6 +145,7 @@ void UpdateReadBarrierEntrypoints(QuickEntryPoints* qpoints, bool is_active) {
reinterpret_cast<intptr_t>(art_quick_read_barrier_mark_introspection_gc_roots) -
reinterpret_cast<intptr_t>(art_quick_read_barrier_mark_introspection);
DCHECK_EQ(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET, gc_roots_diff);
+#endif // ART_USE_RESTRICTED_MODE
// The register 16, i.e. IP0, is reserved, so there is no art_quick_read_barrier_mark_reg16.
// We're using the entry to hold a pointer to the introspection entrypoint instead.
qpoints->SetReadBarrierMarkReg16(is_active ? art_quick_read_barrier_mark_introspection : nullptr);
diff --git a/runtime/arch/arm64/native_entrypoints_arm64.S b/runtime/arch/arm64/native_entrypoints_arm64.S
index 747e572c97..00558c2f98 100644
--- a/runtime/arch/arm64/native_entrypoints_arm64.S
+++ b/runtime/arch/arm64/native_entrypoints_arm64.S
@@ -62,7 +62,12 @@ ENTRY art_jni_dlsym_lookup_stub
bic xIP0, xIP0, #TAGGED_JNI_SP_MASK // ArtMethod** sp
ldr xIP0, [xIP0] // ArtMethod* method
ldr xIP0, [xIP0, #ART_METHOD_ACCESS_FLAGS_OFFSET] // uint32_t access_flags
+#ifdef ART_USE_RESTRICTED_MODE
+ // Critical native methods are disabled and treated as normal native methods instead.
+ mov xIP1, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE)
+#else
mov xIP1, #(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
+#endif
tst xIP0, xIP1
b.ne .Llookup_stub_fast_or_critical_native
bl artFindNativeMethod
diff --git a/runtime/arch/riscv64/native_entrypoints_riscv64.S b/runtime/arch/riscv64/native_entrypoints_riscv64.S
index 24c8205c0f..08d5fc04ae 100644
--- a/runtime/arch/riscv64/native_entrypoints_riscv64.S
+++ b/runtime/arch/riscv64/native_entrypoints_riscv64.S
@@ -65,7 +65,12 @@ ENTRY art_jni_dlsym_lookup_stub
andi t0, t0, ~TAGGED_JNI_SP_MASK // ArtMethod** sp
ld t0, (t0) // ArtMethod* method
lw t0, ART_METHOD_ACCESS_FLAGS_OFFSET(t0) // uint32_t access_flags
+#ifdef ART_USE_RESTRICTED_MODE
+ // Critical native methods are disabled and treated as normal native methods instead.
+ li t1, (ACCESS_FLAGS_METHOD_IS_FAST_NATIVE)
+#else
li t1, (ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE)
+#endif
and t0, t0, t1
bnez t0, .Llookup_stub_fast_or_critical_native
call artFindNativeMethod
diff --git a/runtime/arch/x86/native_entrypoints_x86.S b/runtime/arch/x86/native_entrypoints_x86.S
index 9d1c41a069..a676b0f664 100644
--- a/runtime/arch/x86/native_entrypoints_x86.S
+++ b/runtime/arch/x86/native_entrypoints_x86.S
@@ -65,8 +65,13 @@ DEFINE_FUNCTION art_jni_dlsym_lookup_stub
movl THREAD_TOP_QUICK_FRAME_OFFSET(%eax), %eax // uintptr_t tagged_quick_frame
andl LITERAL(TAGGED_JNI_SP_MASK_TOGGLED32), %eax // ArtMethod** sp
movl (%eax), %eax // ArtMethod* method
+#ifdef ART_USE_RESTRICTED_MODE
+ // Critical native methods are disabled and treated as normal native methods instead.
+ testl LITERAL(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE), ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
+#else
testl LITERAL(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE), \
ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
+#endif
jne .Llookup_stub_fast_or_critical_native
call SYMBOL(artFindNativeMethod) // (Thread*)
jmp .Llookup_stub_continue
diff --git a/runtime/arch/x86_64/native_entrypoints_x86_64.S b/runtime/arch/x86_64/native_entrypoints_x86_64.S
index 12194ef97c..b42981f030 100644
--- a/runtime/arch/x86_64/native_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/native_entrypoints_x86_64.S
@@ -73,8 +73,14 @@ DEFINE_FUNCTION art_jni_dlsym_lookup_stub
movq THREAD_TOP_QUICK_FRAME_OFFSET(%rdi), %rax // uintptr_t tagged_quick_frame
andq LITERAL(TAGGED_JNI_SP_MASK_TOGGLED64), %rax // ArtMethod** sp
movq (%rax), %rax // ArtMethod* method
+#ifdef ART_USE_RESTRICTED_MODE
+ // Critical native methods are disabled and treated as normal native methods instead.
+ testl LITERAL(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE), \
+ ART_METHOD_ACCESS_FLAGS_OFFSET(%rax)
+#else
testl LITERAL(ACCESS_FLAGS_METHOD_IS_FAST_NATIVE | ACCESS_FLAGS_METHOD_IS_CRITICAL_NATIVE), \
ART_METHOD_ACCESS_FLAGS_OFFSET(%rax)
+#endif
jne .Llookup_stub_fast_or_critical_native
call SYMBOL(artFindNativeMethod) // (Thread*)
jmp .Llookup_stub_continue
diff --git a/runtime/art_method.h b/runtime/art_method.h
index ad55547c3b..1884d715e1 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -470,12 +470,19 @@ class EXPORT ArtMethod final {
return IsCriticalNative(GetAccessFlags());
}
- static bool IsCriticalNative(uint32_t access_flags) {
+ static bool IsCriticalNative([[maybe_unused]] uint32_t access_flags) {
+#ifdef ART_USE_RESTRICTED_MODE
+ // Return false to treat all critical native methods as normal native methods, i.e. they
+ // will use the generic JNI trampoline instead.
+ // TODO(Simulator): Support critical native methods.
+ return false;
+#else
// The presence of the annotation is checked by ClassLinker and recorded in access flags.
// The kAccCriticalNative flag value is used with a different meaning for non-native methods,
// so we need to check the kAccNative flag as well.
constexpr uint32_t mask = kAccCriticalNative | kAccNative;
return (access_flags & mask) == mask;
+#endif
}
// Returns true if the method is managed (not native).
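
For context on the mask in the unrestricted path: the kAccCriticalNative bit has a different meaning on non-native methods, so a set critical bit alone proves nothing and both bits must be checked together. A minimal worked example (the critical-native flag value here is illustrative; ART's real constants are defined in its modifiers header):

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kAccNative         = 0x00000100;  // standard ACC_NATIVE value
    constexpr uint32_t kAccCriticalNative = 0x00200000;  // illustrative; see ART's modifiers.h

    bool IsCriticalNative(uint32_t access_flags) {
      constexpr uint32_t mask = kAccCriticalNative | kAccNative;
      return (access_flags & mask) == mask;  // both bits must be set
    }

    int main() {
      assert(IsCriticalNative(kAccNative | kAccCriticalNative));  // genuine @CriticalNative method
      assert(!IsCriticalNative(kAccNative));                      // plain native method
      assert(!IsCriticalNative(kAccCriticalNative));              // non-native: bit means something else
      return 0;
    }

The restricted-mode trampoline change below re-derives exactly this mask because IsCriticalNative() itself is hard-wired to false.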
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 767b0c7b8b..f7f3673259 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -1930,8 +1930,17 @@ class BuildGenericJniFrameVisitor final : public QuickArgumentVisitor {
fsc.GetStartFprRegs(reserved_area),
out_args_sp);
+ bool uses_critical_args = critical_native;
+
+#ifdef ART_USE_RESTRICTED_MODE
+ // In restricted mode IsCriticalNative() always returns false, so check whether the method is
+ // actually a critical native method. If it is, it won't need the JNI environment or jclass
+ // arguments.
+ constexpr uint32_t mask = kAccCriticalNative | kAccNative;
+ uses_critical_args = (method->GetAccessFlags() & mask) == mask;
+#endif
+
// First 2 parameters are always excluded for CriticalNative methods.
- if (LIKELY(!critical_native)) {
+ if (LIKELY(!uses_critical_args)) {
// jni environment is always first argument
sm_.AdvancePointer(self->GetJniEnv());
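
The reason the first two parameters are skipped for critical natives: an @CriticalNative method receives raw primitives only, while a normal native method gets JNIEnv* and the declaring class prepended by the trampoline. A sketch of the two shapes for a hypothetical static int add(int, int) (local type aliases stand in for jni.h):

    #include <cstdint>

    using jint = int32_t;
    struct _JNIEnv;  struct _jclass;  // opaque stand-ins for jni.h types
    using JNIEnv = _JNIEnv;
    using jclass = _jclass*;

    // Normal (or fast) native: the trampoline prepends JNIEnv* and the declaring class.
    extern "C" jint Java_Foo_add(JNIEnv* env, jclass clazz, jint a, jint b) {
      (void)env; (void)clazz;
      return a + b;
    }

    // @CriticalNative: raw primitives only, which is why the frame builder skips the
    // first two parameters when uses_critical_args is true.
    extern "C" jint CriticalAdd(jint a, jint b) { return a + b; }

    int main() { return CriticalAdd(2, 3) == 5 ? 0 : 1; }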
diff --git a/runtime/gc/collector_type.h b/runtime/gc/collector_type.h
index 3c19079c08..4267a7763c 100644
--- a/runtime/gc/collector_type.h
+++ b/runtime/gc/collector_type.h
@@ -75,6 +75,8 @@ static constexpr CollectorType kCollectorTypeDefault =
kCollectorTypeSS
#elif ART_DEFAULT_GC_TYPE_IS_CMS
kCollectorTypeCMS
+#elif ART_DEFAULT_GC_TYPE_IS_MS
+ kCollectorTypeMS
#else
#error "ART default GC type must be set"
#endif
diff --git a/runtime/interpreter/mterp/nterp.cc b/runtime/interpreter/mterp/nterp.cc
index b929444fc6..fc6168c70d 100644
--- a/runtime/interpreter/mterp/nterp.cc
+++ b/runtime/interpreter/mterp/nterp.cc
@@ -35,6 +35,12 @@ namespace art HIDDEN {
namespace interpreter {
bool IsNterpSupported() {
+#ifdef ART_USE_RESTRICTED_MODE
+ // TODO(Simulator): Support Nterp.
+ // Nterp uses the native stack and the quick stack frame layout, which complicates simulator
+ // mode; use the switch interpreter only for now.
+ return false;
+#else
switch (kRuntimeQuickCodeISA) {
case InstructionSet::kArm:
case InstructionSet::kThumb2:
@@ -48,6 +54,7 @@ bool IsNterpSupported() {
default:
return false;
}
+#endif // ART_USE_RESTRICTED_MODE
}
bool CanRuntimeUseNterp() REQUIRES_SHARED(Locks::mutator_lock_) {
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 0186740015..23e06ab792 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1441,6 +1441,40 @@ static inline void CreatePreAllocatedException(Thread* self,
detailMessageField->SetObject</* kTransactionActive= */ false>(exception->Read(), message);
}
+inline void Runtime::CreatePreAllocatedExceptions(Thread* self) {
+ // Pre-allocate an OutOfMemoryError for the case when we fail to
+ // allocate the exception to be thrown.
+ CreatePreAllocatedException(self,
+ this,
+ &pre_allocated_OutOfMemoryError_when_throwing_exception_,
+ "Ljava/lang/OutOfMemoryError;",
+ "OutOfMemoryError thrown while trying to throw an exception; "
+ "no stack trace available");
+ // Pre-allocate an OutOfMemoryError for the double-OOME case.
+ CreatePreAllocatedException(self,
+ this,
+ &pre_allocated_OutOfMemoryError_when_throwing_oome_,
+ "Ljava/lang/OutOfMemoryError;",
+ "OutOfMemoryError thrown while trying to throw OutOfMemoryError; "
+ "no stack trace available");
+ // Pre-allocate an OutOfMemoryError for the case when we fail to
+ // allocate while handling a stack overflow.
+ CreatePreAllocatedException(self,
+ this,
+ &pre_allocated_OutOfMemoryError_when_handling_stack_overflow_,
+ "Ljava/lang/OutOfMemoryError;",
+ "OutOfMemoryError thrown while trying to handle a stack overflow; "
+ "no stack trace available");
+ // Pre-allocate a NoClassDefFoundError for the common case of failing to find a system class
+ // ahead of checking the application's class loader.
+ CreatePreAllocatedException(self,
+ this,
+ &pre_allocated_NoClassDefFoundError_,
+ "Ljava/lang/NoClassDefFoundError;",
+ "Class not found using the boot class loader; "
+ "no stack trace available");
+}
+
std::string Runtime::GetApexVersions(ArrayRef<const std::string> boot_class_path_locations) {
std::vector<std::string_view> bcp_apexes;
for (std::string_view jar : boot_class_path_locations) {
@@ -1892,6 +1926,12 @@ bool Runtime::Init(RuntimeArgumentMap&& runtime_options_in) {
break;
}
+#ifdef ART_USE_RESTRICTED_MODE
+ // TODO(Simulator): support signal handling and implicit checks.
+ implicit_suspend_checks_ = false;
+ implicit_null_checks_ = false;
+#endif // ART_USE_RESTRICTED_MODE
+
fault_manager.Init(!no_sig_chain_);
if (!no_sig_chain_) {
if (HandlesSignalsInCompiledCode()) {
@@ -2076,38 +2116,7 @@ bool Runtime::Init(RuntimeArgumentMap&& runtime_options_in) {
DCHECK(pre_allocated_NoClassDefFoundError_.Read()->GetClass()
->DescriptorEquals("Ljava/lang/NoClassDefFoundError;"));
} else {
- // Pre-allocate an OutOfMemoryError for the case when we fail to
- // allocate the exception to be thrown.
- CreatePreAllocatedException(self,
- this,
- &pre_allocated_OutOfMemoryError_when_throwing_exception_,
- "Ljava/lang/OutOfMemoryError;",
- "OutOfMemoryError thrown while trying to throw an exception; "
- "no stack trace available");
- // Pre-allocate an OutOfMemoryError for the double-OOME case.
- CreatePreAllocatedException(self,
- this,
- &pre_allocated_OutOfMemoryError_when_throwing_oome_,
- "Ljava/lang/OutOfMemoryError;",
- "OutOfMemoryError thrown while trying to throw OutOfMemoryError; "
- "no stack trace available");
- // Pre-allocate an OutOfMemoryError for the case when we fail to
- // allocate while handling a stack overflow.
- CreatePreAllocatedException(self,
- this,
- &pre_allocated_OutOfMemoryError_when_handling_stack_overflow_,
- "Ljava/lang/OutOfMemoryError;",
- "OutOfMemoryError thrown while trying to handle a stack overflow; "
- "no stack trace available");
-
- // Pre-allocate a NoClassDefFoundError for the common case of failing to find a system class
- // ahead of checking the application's class loader.
- CreatePreAllocatedException(self,
- this,
- &pre_allocated_NoClassDefFoundError_,
- "Ljava/lang/NoClassDefFoundError;",
- "Class not found using the boot class loader; "
- "no stack trace available");
+ CreatePreAllocatedExceptions(self);
}
// Class-roots are setup, we can now finish initializing the JniIdManager.
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 9661f9e514..8f781cff34 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -1118,6 +1118,8 @@ class Runtime {
// See Flags::ReloadAllFlags as well.
static void ReloadAllFlags(const std::string& caller);
+ inline void CreatePreAllocatedExceptions(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
+
// Parses /apex/apex-info-list.xml to build a string containing apex versions of boot classpath
// jars, which is encoded into .oat files.
static std::string GetApexVersions(ArrayRef<const std::string> boot_class_path_locations);
diff --git a/runtime/runtime_globals.h b/runtime/runtime_globals.h
index d3963a52e1..cdab9e0deb 100644
--- a/runtime/runtime_globals.h
+++ b/runtime/runtime_globals.h
@@ -110,7 +110,12 @@ static constexpr ALWAYS_INLINE size_t ModuloPageSize(size_t num) {
// Returns whether the given memory offset can be used for generating
// an implicit null check.
static inline bool CanDoImplicitNullCheckOn(uintptr_t offset) {
+#ifdef ART_USE_RESTRICTED_MODE
+ UNUSED(offset);
+ return false;
+#else
return offset < gPageSize;
+#endif // ART_USE_RESTRICTED_MODE
}
// Required object alignment
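
Why offset < gPageSize is the right bound in the unrestricted path: an implicit null check compiles obj.field to a bare load at obj + offset, relying on the access faulting when obj is null, which is guaranteed only while the address stays inside the never-mapped page at zero where the SIGSEGV handler can convert the fault into a NullPointerException. Restricted mode returns false for every offset because the simulator does not yet route such faults through ART's fault handler, matching the implicit_null_checks_ = false settings above. A minimal sketch with an assumed 4 KiB page:

    #include <cstdint>
    #include <iostream>

    constexpr uintptr_t kAssumedPageSize = 4096;  // illustrative; ART uses the runtime's gPageSize

    // An implicit null check compiles `obj.field` to a bare load at (obj + offset), with the
    // fault handler turning the resulting SIGSEGV into a NullPointerException. That only works
    // if a null obj is guaranteed to fault, i.e. the address stays inside the unmapped page 0.
    bool CanDoImplicitNullCheckOn(uintptr_t offset) {
      return offset < kAssumedPageSize;
    }

    int main() {
      std::cout << CanDoImplicitNullCheckOn(8) << "\n";     // 1: null + 8 faults in page 0
      std::cout << CanDoImplicitNullCheckOn(8192) << "\n";  // 0: null + 8192 could be mapped
    }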
diff --git a/test/knownfailures.json b/test/knownfailures.json
index f764fe9fce..4d2d8ae654 100644
--- a/test/knownfailures.json
+++ b/test/knownfailures.json
@@ -1623,5 +1623,19 @@
"The ability to destroy a thread group and the concept of a destroyed ",
"thread group no longer exists. A thread group is eligible to be GC'ed ",
"when there are no live threads in the group and it is otherwise unreachable."]
+ },
+ {
+ "tests": ["004-StackWalk",
+ "141-class-unload",
+ "178-app-image-native-method",
+ "597-deopt-busy-loop",
+ "629-vdex-speed",
+ "638-checker-inline-cache-intrinsic",
+ "661-oat-writer-layout",
+ "692-vdex-secondary-loader",
+ "850-checker-branches",
+ "2042-reference-processing"],
+ "env_vars": {"ART_USE_RESTRICTED_MODE": "true"},
+ "description": ["Test failures when using the restricted mode for simulator."]
}
]
diff --git a/test/run-test b/test/run-test
index a3f1f60504..1117e07a10 100755
--- a/test/run-test
+++ b/test/run-test
@@ -759,6 +759,11 @@ if True:
if prebuild_mode:
run_checker = True
+ if os.environ.get("ART_USE_RESTRICTED_MODE") == "true":
+ # TODO(Simulator): support checker runs.
+ run_checker = False
+ cfg_output_dir = tmp_dir
+
if not target_mode:
cfg_output_dir = tmp_dir
checker_args = f"--arch={host_arch_name.upper()}"