-rw-r--r--  compiler/dex/quick/arm/int_arm.cc  6
-rw-r--r--  compiler/dex/quick/gen_invoke.cc  2
-rw-r--r--  compiler/dex/quick/mips/int_mips.cc  9
-rw-r--r--  compiler/dex/quick/x86/int_x86.cc  3
-rw-r--r--  compiler/jni/jni_compiler_test.cc  4
-rw-r--r--  compiler/jni/quick/jni_compiler.cc  5
-rw-r--r--  runtime/arch/arm/quick_entrypoints_arm.S  16
-rw-r--r--  runtime/arch/mips/quick_entrypoints_mips.S  35
-rw-r--r--  runtime/arch/x86/quick_entrypoints_x86.S  5
-rw-r--r--  runtime/check_jni.cc  16
-rw-r--r--  runtime/class_linker.cc  12
-rw-r--r--  runtime/class_linker.h  4
-rw-r--r--  runtime/dex_file.h  2
-rw-r--r--  runtime/entrypoints/jni/jni_entrypoints.cc  4
-rw-r--r--  runtime/entrypoints/quick/quick_entrypoints.h  13
-rw-r--r--  runtime/entrypoints/quick/quick_jni_entrypoints.cc  30
-rw-r--r--  runtime/gc/heap.cc  22
-rw-r--r--  runtime/gc/heap.h  5
-rw-r--r--  runtime/instrumentation.cc  2
-rw-r--r--  runtime/instrumentation.h  27
-rw-r--r--  runtime/interpreter/interpreter.cc  2
-rw-r--r--  runtime/interpreter/interpreter_goto_table_impl.cc  162
-rw-r--r--  runtime/jni_internal.cc  20
-rw-r--r--  runtime/jni_internal.h  2
-rw-r--r--  runtime/mirror/art_method.cc  10
-rw-r--r--  runtime/mirror/art_method.h  6
-rw-r--r--  runtime/mirror/class.cc  4
-rw-r--r--  runtime/modifiers.h  1
-rw-r--r--  runtime/native/dalvik_system_DexFile.cc  26
-rw-r--r--  runtime/native/dalvik_system_VMRuntime.cc  343
-rw-r--r--  runtime/native/dalvik_system_VMStack.cc  13
-rw-r--r--  runtime/native/java_lang_Class.cc  11
-rw-r--r--  runtime/native/java_lang_DexCache.cc  6
-rw-r--r--  runtime/native/java_lang_Object.cc  22
-rw-r--r--  runtime/native/java_lang_Runtime.cc  14
-rw-r--r--  runtime/native/java_lang_String.cc  13
-rw-r--r--  runtime/native/java_lang_System.cc  14
-rw-r--r--  runtime/native/java_lang_Thread.cc  9
-rw-r--r--  runtime/native/java_lang_reflect_Array.cc  10
-rw-r--r--  runtime/native/java_lang_reflect_Constructor.cc  3
-rw-r--r--  runtime/native/java_lang_reflect_Field.cc  54
-rw-r--r--  runtime/native/org_apache_harmony_dalvik_ddmc_DdmServer.cc  6
-rw-r--r--  runtime/native/scoped_fast_native_object_access.h  103
-rw-r--r--  runtime/native/sun_misc_Unsafe.cc  74
-rw-r--r--  runtime/runtime.cc  2
-rw-r--r--  runtime/runtime.h  1
-rw-r--r--  runtime/scoped_thread_state_change.h  13
-rw-r--r--  runtime/thread-inl.h  8
-rw-r--r--  runtime/utils.cc  2
-rw-r--r--  runtime/utils_test.cc  14
-rw-r--r--  test/JniTest/JniTest.java  21
-rw-r--r--  test/JniTest/jni_test.cc  16
52 files changed, 913 insertions, 314 deletions
diff --git a/compiler/dex/quick/arm/int_arm.cc b/compiler/dex/quick/arm/int_arm.cc
index 69ea4e9ca3..c3140a5cac 100644
--- a/compiler/dex/quick/arm/int_arm.cc
+++ b/compiler/dex/quick/arm/int_arm.cc
@@ -867,12 +867,14 @@ void ArmMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
}
int reg_ptr;
+ bool allocated_reg_ptr_temp = false;
if (constant_index) {
reg_ptr = rl_array.low_reg;
- } else if (IsTemp(rl_array.low_reg)) {
+ } else if (IsTemp(rl_array.low_reg) && !card_mark) {
Clobber(rl_array.low_reg);
reg_ptr = rl_array.low_reg;
} else {
+ allocated_reg_ptr_temp = true;
reg_ptr = AllocTemp();
}
@@ -923,7 +925,7 @@ void ArmMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
StoreBaseIndexed(reg_ptr, rl_index.low_reg, rl_src.low_reg,
scale, size);
}
- if (!constant_index) {
+ if (allocated_reg_ptr_temp) {
FreeTemp(reg_ptr);
}
if (card_mark) {
diff --git a/compiler/dex/quick/gen_invoke.cc b/compiler/dex/quick/gen_invoke.cc
index 3def7f5404..64938f3a73 100644
--- a/compiler/dex/quick/gen_invoke.cc
+++ b/compiler/dex/quick/gen_invoke.cc
@@ -1249,7 +1249,7 @@ bool Mir2Lir::GenIntrinsic(CallInfo* info) {
}
} else if (tgt_methods_declaring_class.starts_with("Ljava/lang/Float;")) {
std::string tgt_method(PrettyMethod(info->index, *cu_->dex_file));
- if (tgt_method == "int java.lang.Float.float_to_raw_int_bits(float)") {
+ if (tgt_method == "int java.lang.Float.floatToRawIntBits(float)") {
return GenInlinedFloatCvt(info);
}
if (tgt_method == "float java.lang.Float.intBitsToFloat(int)") {
diff --git a/compiler/dex/quick/mips/int_mips.cc b/compiler/dex/quick/mips/int_mips.cc
index 218ed489b4..02ab04ef82 100644
--- a/compiler/dex/quick/mips/int_mips.cc
+++ b/compiler/dex/quick/mips/int_mips.cc
@@ -498,12 +498,14 @@ void MipsMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
rl_array = LoadValue(rl_array, kCoreReg);
rl_index = LoadValue(rl_index, kCoreReg);
int reg_ptr = INVALID_REG;
- if (IsTemp(rl_array.low_reg)) {
+ bool allocated_reg_ptr_temp = false;
+ if (IsTemp(rl_array.low_reg) && !card_mark) {
Clobber(rl_array.low_reg);
reg_ptr = rl_array.low_reg;
} else {
reg_ptr = AllocTemp();
OpRegCopy(reg_ptr, rl_array.low_reg);
+ allocated_reg_ptr_temp = true;
}
/* null object? */
@@ -538,8 +540,6 @@ void MipsMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
}
StoreBaseDispWide(reg_ptr, 0, rl_src.low_reg, rl_src.high_reg);
-
- FreeTemp(reg_ptr);
} else {
rl_src = LoadValue(rl_src, reg_class);
if (needs_range_check) {
@@ -549,6 +549,9 @@ void MipsMir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
StoreBaseIndexed(reg_ptr, rl_index.low_reg, rl_src.low_reg,
scale, size);
}
+ if (allocated_reg_ptr_temp) {
+ FreeTemp(reg_ptr);
+ }
if (card_mark) {
MarkGCCard(rl_src.low_reg, rl_array.low_reg);
}
diff --git a/compiler/dex/quick/x86/int_x86.cc b/compiler/dex/quick/x86/int_x86.cc
index 324d975fc8..3fbc7634be 100644
--- a/compiler/dex/quick/x86/int_x86.cc
+++ b/compiler/dex/quick/x86/int_x86.cc
@@ -503,7 +503,8 @@ void X86Mir2Lir::GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array,
rl_src.high_reg, size, INVALID_SREG);
}
if (card_mark) {
- FreeTemp(rl_index.low_reg); // Ensure there are 2 free regs for card mark.
+ // Free rl_index if it's a temp. Ensures there are 2 free regs for card mark.
+ FreeTemp(rl_index.low_reg);
MarkGCCard(rl_src.low_reg, rl_array.low_reg);
}
}
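
The three GenArrayPut changes above are the same fix applied to three backends: when a card mark follows the store, the array base register is still read after the store, so the code can no longer reuse it as the scratch pointer and must instead remember whether it allocated a fresh temp. A compilable toy version of that bookkeeping follows; the register-pool helpers are stand-ins, not the Mir2Lir API.

    #include <cstdio>

    // Stand-in register pool: pretend r4 is the only reusable temp.
    static bool IsTemp(int reg) { return reg == 4; }
    static int  AllocTemp() { return 12; }
    static void FreeTemp(int reg) { std::printf("  free r%d\n", reg); }
    static void MarkGCCard(int val_reg, int array_reg) {
      std::printf("  card mark using r%d and r%d\n", val_reg, array_reg);
    }

    static void GenArrayPut(int array_reg, int value_reg, bool card_mark) {
      int reg_ptr;
      bool allocated_reg_ptr_temp = false;
      if (IsTemp(array_reg) && !card_mark) {
        reg_ptr = array_reg;               // safe to clobber: no later use
      } else {
        reg_ptr = AllocTemp();             // card mark still reads array_reg
        allocated_reg_ptr_temp = true;
      }
      std::printf("  store via r%d\n", reg_ptr);
      if (allocated_reg_ptr_temp) {        // free only what this path allocated
        FreeTemp(reg_ptr);
      }
      if (card_mark) {
        MarkGCCard(value_reg, array_reg);  // array_reg must still hold the array
      }
    }

    int main() {
      std::puts("object store (needs card mark):");
      GenArrayPut(/*array_reg=*/4, /*value_reg=*/5, /*card_mark=*/true);
      std::puts("primitive store (no card mark):");
      GenArrayPut(4, 5, false);
    }
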
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index a653ab42a9..667b913039 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -152,7 +152,7 @@ TEST_F(JniCompilerTest, CompileAndRunIntMethodThroughStub) {
std::string reason;
ASSERT_TRUE(
Runtime::Current()->GetJavaVM()->LoadNativeLibrary("", soa.Decode<mirror::ClassLoader*>(class_loader_),
- reason)) << reason;
+ &reason)) << reason;
jint result = env_->CallNonvirtualIntMethod(jobj_, jklass_, jmethod_, 24);
EXPECT_EQ(25, result);
@@ -167,7 +167,7 @@ TEST_F(JniCompilerTest, CompileAndRunStaticIntMethodThroughStub) {
std::string reason;
ASSERT_TRUE(
Runtime::Current()->GetJavaVM()->LoadNativeLibrary("", soa.Decode<mirror::ClassLoader*>(class_loader_),
- reason)) << reason;
+ &reason)) << reason;
jint result = env_->CallStaticIntMethod(jklass_, jmethod_, 42);
EXPECT_EQ(43, result);
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index b6b15f94eb..1c9aed83c3 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -81,7 +81,6 @@ CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver& compiler,
UniquePtr<JniCallingConvention> end_jni_conv(
JniCallingConvention::Create(is_static, is_synchronized, jni_end_shorty, instruction_set));
-
// Assembler that holds generated instructions
UniquePtr<Assembler> jni_asm(Assembler::Create(instruction_set));
@@ -354,9 +353,9 @@ CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver& compiler,
// 15. Process pending exceptions from JNI call or monitor exit.
__ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), 0);
- // 16. Remove activation - no need to restore callee save registers because we didn't clobber
+ // 16. Remove activation - need to restore callee save registers since the GC may have changed
// them.
- __ RemoveFrame(frame_size, std::vector<ManagedRegister>());
+ __ RemoveFrame(frame_size, callee_save_regs);
// 17. Finalize code generation
__ EmitSlowPaths();
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index d0731770a8..c98b764424 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -67,16 +67,16 @@
.endm
.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- ldr lr, [sp, #28] @ restore lr for return
- add sp, #32 @ unwind stack
+ add sp, #4 @ bottom word holds Method*
+ pop {r5-r8, r10-r11, lr} @ 7 words of callee saves
.cfi_adjust_cfa_offset -32
.endm
.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
- ldr lr, [sp, #28] @ restore lr for return
- add sp, #32 @ unwind stack
+ add sp, #4 @ bottom word holds Method*
+ pop {r5-r8, r10-r11, lr} @ 7 words of callee saves
.cfi_adjust_cfa_offset -32
- bx lr @ return
+ bx lr @ return
.endm
/*
@@ -103,10 +103,8 @@
.endm
.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
- ldr r1, [sp, #8] @ restore non-callee save r1
- ldrd r2, [sp, #12] @ restore non-callee saves r2-r3
- ldr lr, [sp, #44] @ restore lr
- add sp, #48 @ rewind sp
+ add sp, #8 @ rewind sp
+ pop {r1-r3, r5-r8, r10-r11, lr} @ 10 words of callee saves
.cfi_adjust_cfa_offset -48
.endm
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 7780bb3a25..897aaf4036 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -88,15 +88,29 @@
.endm
.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
- lw $gp, 52($sp)
lw $ra, 60($sp)
+ lw $s8, 56($sp)
+ lw $gp, 52($sp)
+ lw $s7, 48($sp)
+ lw $s6, 44($sp)
+ lw $s5, 40($sp)
+ lw $s4, 36($sp)
+ lw $s3, 32($sp)
+ lw $s2, 28($sp)
addiu $sp, $sp, 64
.cfi_adjust_cfa_offset -64
.endm
.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
- lw $gp, 52($sp)
lw $ra, 60($sp)
+ lw $s8, 56($sp)
+ lw $gp, 52($sp)
+ lw $s7, 48($sp)
+ lw $s6, 44($sp)
+ lw $s5, 40($sp)
+ lw $s4, 36($sp)
+ lw $s3, 32($sp)
+ lw $s2, 28($sp)
jr $ra
addiu $sp, $sp, 64
.cfi_adjust_cfa_offset -64
@@ -138,11 +152,18 @@
.endm
.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
- lw $ra, 60($sp) # restore $ra
- lw $gp, 52($sp) # restore $gp
- lw $a1, 4($sp) # restore non-callee save $a1
- lw $a2, 8($sp) # restore non-callee save $a2
- lw $a3, 12($sp) # restore non-callee save $a3
+ lw $ra, 60($sp)
+ lw $s8, 56($sp)
+ lw $gp, 52($sp)
+ lw $s7, 48($sp)
+ lw $s6, 44($sp)
+ lw $s5, 40($sp)
+ lw $s4, 36($sp)
+ lw $s3, 32($sp)
+ lw $s2, 28($sp)
+ lw $a3, 12($sp)
+ lw $a2, 8($sp)
+ lw $a1, 4($sp)
addiu $sp, $sp, 64 # pop frame
.cfi_adjust_cfa_offset -64
.endm
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 9fce72f780..d7e1be8170 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -41,7 +41,10 @@ MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME)
END_MACRO
MACRO0(RESTORE_REF_ONLY_CALLEE_SAVE_FRAME)
- addl MACRO_LITERAL(28), %esp // Unwind stack up to return address
+ addl MACRO_LITERAL(16), %esp // Unwind stack up to return address
+ POP ebp // Restore callee saves (ebx is saved/restored by the upcall)
+ POP esi
+ POP edi
.cfi_adjust_cfa_offset -28
END_MACRO
diff --git a/runtime/check_jni.cc b/runtime/check_jni.cc
index 54f314391b..54cbfe6ea5 100644
--- a/runtime/check_jni.cc
+++ b/runtime/check_jni.cc
@@ -90,12 +90,6 @@ static bool IsSirtLocalRef(JNIEnv* env, jobject localRef) {
reinterpret_cast<JNIEnvExt*>(env)->self->SirtContains(localRef);
}
-// Hack to allow forcecopy to work with jniGetNonMovableArrayElements.
-// The code deliberately uses an invalid sequence of operations, so we
-// need to pass it through unmodified. Review that code before making
-// any changes here.
-#define kNoCopyMagic 0xd5aab57f
-
// Flags passed into ScopedCheck.
#define kFlag_Default 0x0000
@@ -335,7 +329,7 @@ class ScopedCheck {
return;
}
mirror::Class* c = soa_.Decode<mirror::Class*>(java_class);
- if (!c->IsAssignableFrom(m->GetDeclaringClass())) {
+ if (!m->GetDeclaringClass()->IsAssignableFrom(c)) {
JniAbortF(function_name_, "can't call static %s on class %s",
PrettyMethod(m).c_str(), PrettyClass(c).c_str());
}
@@ -1098,10 +1092,6 @@ static void* CreateGuardedPACopy(JNIEnv* env, const jarray java_array, jboolean*
* back into the managed heap, and may or may not release the underlying storage.
*/
static void ReleaseGuardedPACopy(JNIEnv* env, jarray java_array, void* dataBuf, int mode) {
- if (reinterpret_cast<uintptr_t>(dataBuf) == kNoCopyMagic) {
- return;
- }
-
ScopedObjectAccess soa(env);
mirror::Array* a = soa.Decode<mirror::Array*>(java_array);
@@ -1596,9 +1586,7 @@ struct ForceCopyGetChecker {
template<typename ResultT>
ResultT Check(JNIEnv* env, jarray array, jboolean* isCopy, ResultT result) {
if (force_copy && result != NULL) {
- if (no_copy != kNoCopyMagic) {
- result = reinterpret_cast<ResultT>(CreateGuardedPACopy(env, array, isCopy));
- }
+ result = reinterpret_cast<ResultT>(CreateGuardedPACopy(env, array, isCopy));
}
return result;
}
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index c37b548cad..aa5f2bf21e 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2674,7 +2674,6 @@ mirror::Class* ClassLinker::CreateProxyClass(mirror::String* name,
klass->SetName(name);
mirror::Class* proxy_class = GetClassRoot(kJavaLangReflectProxy);
klass->SetDexCache(proxy_class->GetDexCache());
-
klass->SetStatus(mirror::Class::kStatusIdx, self);
// Instance fields are inherited, but we add a couple of static fields...
@@ -2785,6 +2784,9 @@ mirror::Class* ClassLinker::CreateProxyClass(mirror::String* name,
CHECK_EQ(synth_proxy_class->GetInterfaces(), interfaces);
CHECK_EQ(synth_proxy_class->GetThrows(), throws);
}
+ std::string descriptor(GetDescriptorForProxy(klass.get()));
+ mirror::Class* existing = InsertClass(descriptor.c_str(), klass.get(), Hash(descriptor.c_str()));
+ CHECK(existing == nullptr);
return klass.get();
}
@@ -4162,10 +4164,10 @@ mirror::ArtMethod* ClassLinker::ResolveMethod(const DexFile& dex_file,
}
mirror::ArtField* ClassLinker::ResolveField(const DexFile& dex_file,
- uint32_t field_idx,
- mirror::DexCache* dex_cache,
- mirror::ClassLoader* class_loader,
- bool is_static) {
+ uint32_t field_idx,
+ mirror::DexCache* dex_cache,
+ mirror::ClassLoader* class_loader,
+ bool is_static) {
DCHECK(dex_cache != NULL);
mirror::ArtField* resolved = dex_cache->GetResolvedField(field_idx);
if (resolved != NULL) {
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index baeec66f79..11ba78b36a 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -346,10 +346,6 @@ class ClassLinker {
return quick_resolution_trampoline_;
}
- InternTable* GetInternTable() const {
- return intern_table_;
- }
-
// Attempts to insert a class into a class table. Returns NULL if
// the class was inserted, otherwise returns an existing class with
// the same descriptor and ClassLoader.
diff --git a/runtime/dex_file.h b/runtime/dex_file.h
index 7f92f4930a..12e8440d2a 100644
--- a/runtime/dex_file.h
+++ b/runtime/dex_file.h
@@ -33,6 +33,8 @@
namespace art {
+// TODO: remove dependencies on mirror classes, primarily by moving
+// EncodedStaticFieldValueIterator to its own file.
namespace mirror {
class ArtField;
class ArtMethod;
diff --git a/runtime/entrypoints/jni/jni_entrypoints.cc b/runtime/entrypoints/jni/jni_entrypoints.cc
index 83d3a584c5..16364fcdb7 100644
--- a/runtime/entrypoints/jni/jni_entrypoints.cc
+++ b/runtime/entrypoints/jni/jni_entrypoints.cc
@@ -41,7 +41,7 @@ extern "C" void* artFindNativeMethod() {
return NULL;
} else {
// Register so that future calls don't come here
- method->RegisterNative(self, native_code);
+ method->RegisterNative(self, native_code, false);
return native_code;
}
}
@@ -115,7 +115,7 @@ extern "C" const void* artWorkAroundAppJniBugs(Thread* self, intptr_t* sp)
const void* code = reinterpret_cast<const void*>(jni_method->GetNativeGcMap());
if (UNLIKELY(code == NULL)) {
code = GetJniDlsymLookupStub();
- jni_method->RegisterNative(self, code);
+ jni_method->RegisterNative(self, code, false);
}
return code;
}
diff --git a/runtime/entrypoints/quick/quick_entrypoints.h b/runtime/entrypoints/quick/quick_entrypoints.h
index e9964ad48c..c8a85a0fe3 100644
--- a/runtime/entrypoints/quick/quick_entrypoints.h
+++ b/runtime/entrypoints/quick/quick_entrypoints.h
@@ -142,22 +142,23 @@ struct PACKED(4) QuickEntryPoints {
// JNI entrypoints.
-extern uint32_t JniMethodStart(Thread* self) UNLOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
+extern uint32_t JniMethodStart(Thread* self) NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self)
- UNLOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self)
- SHARED_LOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie, jobject locked,
Thread* self)
- SHARED_LOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
extern mirror::Object* JniMethodEndWithReference(jobject result, uint32_t saved_local_ref_cookie,
Thread* self)
- SHARED_LOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
uint32_t saved_local_ref_cookie,
jobject locked, Thread* self)
- SHARED_LOCK_FUNCTION(Locks::mutator_lock_) HOT_ATTR;
+ NO_THREAD_SAFETY_ANALYSIS HOT_ATTR;
} // namespace art
diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
index 27ae59b9b0..59da7a05cb 100644
--- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
@@ -30,10 +30,14 @@ namespace art {
// Called on entry to JNI, transition out of Runnable and release share of mutator_lock_.
extern uint32_t JniMethodStart(Thread* self) {
JNIEnvExt* env = self->GetJniEnv();
- DCHECK(env != NULL);
+ DCHECK(env != nullptr);
uint32_t saved_local_ref_cookie = env->local_ref_cookie;
env->local_ref_cookie = env->locals.GetSegmentState();
- self->TransitionFromRunnableToSuspended(kNative);
+ mirror::ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
+ if (!native_method->IsFastNative()) {
+ // When not fast JNI we transition out of runnable.
+ self->TransitionFromRunnableToSuspended(kNative);
+ }
return saved_local_ref_cookie;
}
@@ -42,6 +46,20 @@ extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) {
return JniMethodStart(self);
}
+// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
+static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
+ mirror::ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
+ bool is_fast = native_method->IsFastNative();
+ if (!is_fast) {
+ self->TransitionFromSuspendedToRunnable();
+ } else if (UNLIKELY(self->TestAllFlags())) {
+ // In fast JNI mode we never transitioned out of runnable. Perform a suspend check if there
+ // is a flag raised.
+ DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
+ CheckSuspend(self);
+ }
+}
+
static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self) {
JNIEnvExt* env = self->GetJniEnv();
env->locals.SetSegmentState(env->local_ref_cookie);
@@ -50,21 +68,21 @@ static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self) {
}
extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
- self->TransitionFromSuspendedToRunnable();
+ GoToRunnable(self);
PopLocalReferences(saved_local_ref_cookie, self);
}
extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie, jobject locked,
Thread* self) {
- self->TransitionFromSuspendedToRunnable();
+ GoToRunnable(self);
UnlockJniSynchronizedMethod(locked, self); // Must decode before pop.
PopLocalReferences(saved_local_ref_cookie, self);
}
extern mirror::Object* JniMethodEndWithReference(jobject result, uint32_t saved_local_ref_cookie,
Thread* self) {
- self->TransitionFromSuspendedToRunnable();
+ GoToRunnable(self);
mirror::Object* o = self->DecodeJObject(result); // Must decode before pop.
PopLocalReferences(saved_local_ref_cookie, self);
// Process result.
@@ -80,7 +98,7 @@ extern mirror::Object* JniMethodEndWithReference(jobject result, uint32_t saved_
extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
uint32_t saved_local_ref_cookie,
jobject locked, Thread* self) {
- self->TransitionFromSuspendedToRunnable();
+ GoToRunnable(self);
UnlockJniSynchronizedMethod(locked, self); // Must decode before pop.
mirror::Object* o = self->DecodeJObject(result);
PopLocalReferences(saved_local_ref_cookie, self);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index ed902420af..d26e28cb47 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2044,24 +2044,22 @@ bool Heap::IsGCRequestPending() const {
return concurrent_start_bytes_ != std::numeric_limits<size_t>::max();
}
-void Heap::RegisterNativeAllocation(int bytes) {
+void Heap::RegisterNativeAllocation(JNIEnv* env, int bytes) {
// Total number of native bytes allocated.
native_bytes_allocated_.fetch_add(bytes);
- Thread* self = Thread::Current();
if (static_cast<size_t>(native_bytes_allocated_) > native_footprint_gc_watermark_) {
// The second watermark is higher than the gc watermark. If you hit this it means you are
// allocating native objects faster than the GC can keep up with.
if (static_cast<size_t>(native_bytes_allocated_) > native_footprint_limit_) {
- JNIEnv* env = self->GetJniEnv();
// Can't do this in WellKnownClasses::Init since System is not properly set up at that
// point.
- if (WellKnownClasses::java_lang_System_runFinalization == NULL) {
+ if (UNLIKELY(WellKnownClasses::java_lang_System_runFinalization == NULL)) {
DCHECK(WellKnownClasses::java_lang_System != NULL);
WellKnownClasses::java_lang_System_runFinalization =
CacheMethod(env, WellKnownClasses::java_lang_System, true, "runFinalization", "()V");
- assert(WellKnownClasses::java_lang_System_runFinalization != NULL);
+ CHECK(WellKnownClasses::java_lang_System_runFinalization != NULL);
}
- if (WaitForConcurrentGcToComplete(self) != collector::kGcTypeNone) {
+ if (WaitForConcurrentGcToComplete(ThreadForEnv(env)) != collector::kGcTypeNone) {
// Just finished a GC, attempt to run finalizers.
env->CallStaticVoidMethod(WellKnownClasses::java_lang_System,
WellKnownClasses::java_lang_System_runFinalization);
@@ -2080,20 +2078,22 @@ void Heap::RegisterNativeAllocation(int bytes) {
UpdateMaxNativeFootprint();
} else {
if (!IsGCRequestPending()) {
- RequestConcurrentGC(self);
+ RequestConcurrentGC(ThreadForEnv(env));
}
}
}
}
-void Heap::RegisterNativeFree(int bytes) {
+void Heap::RegisterNativeFree(JNIEnv* env, int bytes) {
int expected_size, new_size;
do {
expected_size = native_bytes_allocated_.load();
new_size = expected_size - bytes;
- if (new_size < 0) {
- ThrowRuntimeException("attempted to free %d native bytes with only %d native bytes registered as allocated",
- bytes, expected_size);
+ if (UNLIKELY(new_size < 0)) {
+ ScopedObjectAccess soa(env);
+ env->ThrowNew(WellKnownClasses::java_lang_RuntimeException,
+ StringPrintf("Attempted to free %d native bytes with only %d native bytes "
+ "registered as allocated", bytes, expected_size).c_str());
break;
}
} while (!native_bytes_allocated_.compare_and_swap(expected_size, new_size));
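
RegisterNativeFree keeps the native byte count consistent with a compare-and-swap loop, so concurrent frees can never silently drive it negative. The standalone sketch below shows the same pattern with std::atomic; the names are illustrative, not ART's AtomicInteger.

    #include <atomic>
    #include <cstdio>

    static std::atomic<int> native_bytes_allocated{0};

    // Returns false (leaving the counter unchanged) if the free would drive the
    // registered byte count negative, mirroring the error path that now throws a
    // RuntimeException through JNI.
    static bool RegisterNativeFree(int bytes) {
      int expected = native_bytes_allocated.load();
      int desired;
      do {
        desired = expected - bytes;
        if (desired < 0) {
          std::fprintf(stderr, "attempted to free %d bytes, only %d registered\n",
                       bytes, expected);
          return false;
        }
        // compare_exchange_weak refreshes 'expected' on failure, so the loop
        // recomputes 'desired' against the latest counter value.
      } while (!native_bytes_allocated.compare_exchange_weak(expected, desired));
      return true;
    }

    int main() {
      native_bytes_allocated.fetch_add(100);         // RegisterNativeAllocation(100)
      std::printf("%d\n", RegisterNativeFree(60));   // 1: succeeds
      std::printf("%d\n", RegisterNativeFree(60));   // 0: would go negative
    }
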
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index ffd3034674..1c2b7efc22 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -146,9 +146,8 @@ class Heap {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void ThrowOutOfMemoryError(size_t byte_count, bool large_object_allocation);
- void RegisterNativeAllocation(int bytes)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- void RegisterNativeFree(int bytes) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void RegisterNativeAllocation(JNIEnv* env, int bytes);
+ void RegisterNativeFree(JNIEnv* env, int bytes);
// The given reference is believed to be to an object in the Java heap, check the soundness of it.
void VerifyObjectImpl(const mirror::Object* o);
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 481cbad3b8..8316bc56b3 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -293,6 +293,7 @@ void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t ev
have_exception_caught_listeners_ = true;
}
ConfigureStubs(require_entry_exit_stubs, require_interpreter);
+ UpdateInterpreterHandlerTable();
}
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
@@ -341,6 +342,7 @@ void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t
have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
}
ConfigureStubs(require_entry_exit_stubs, require_interpreter);
+ UpdateInterpreterHandlerTable();
}
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 28f95553f8..7a0aaf7858 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -38,6 +38,14 @@ namespace instrumentation {
const bool kVerboseInstrumentation = false;
+// Interpreter handler tables.
+enum InterpreterHandlerTable {
+ kMainHandlerTable = 0, // Main handler table: no suspend check, no instrumentation.
+ kAlternativeHandlerTable = 1, // Alternative handler table: suspend check and/or instrumentation
+ // enabled.
+ kNumHandlerTables
+};
+
// Instrumentation event listener API. Registered listeners will get the appropriate call back for
// the events they are listening for. The call backs supply the thread, method and dex_pc the event
// occurred upon. The thread may or may not be Thread::Current().
@@ -95,7 +103,8 @@ class Instrumentation {
interpret_only_(false), forced_interpret_only_(false),
have_method_entry_listeners_(false), have_method_exit_listeners_(false),
have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
- have_exception_caught_listeners_(false) {}
+ have_exception_caught_listeners_(false),
+ interpreter_handler_table_(kMainHandlerTable) {}
// Add a listener to be notified of the masked together set of instrumentation events. This
// suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
@@ -110,6 +119,10 @@ class Instrumentation {
EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);
+ InterpreterHandlerTable GetInterpreterHandlerTable() const {
+ return interpreter_handler_table_;
+ }
+
// Update the code of a method respecting any installed stubs.
void UpdateMethodsCode(mirror::ArtMethod* method, const void* code) const;
@@ -149,6 +162,11 @@ class Instrumentation {
return have_dex_pc_listeners_;
}
+ bool IsActive() const {
+ return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
+ have_exception_caught_listeners_ || have_method_unwind_listeners_;
+ }
+
// Inform listeners that a method has been entered. A dex PC is provided as we may install
// listeners into executing code and get method enter events for methods already on the stack.
void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
@@ -215,6 +233,10 @@ class Instrumentation {
EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);
+ void UpdateInterpreterHandlerTable() {
+ interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
+ }
+
void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
const mirror::ArtMethod* method, uint32_t dex_pc) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -267,6 +289,9 @@ class Instrumentation {
std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);
+ // Current interpreter handler table. This is updated each time the thread state flags are modified.
+ InterpreterHandlerTable interpreter_handler_table_;
+
DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index 48c00148b4..d7555ddb6a 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -269,7 +269,7 @@ enum InterpreterImplKind {
kComputedGotoImplKind // computed-goto-based interpreter implementation.
};
-static const InterpreterImplKind kInterpreterImplKind = kSwitchImpl;
+static const InterpreterImplKind kInterpreterImplKind = kComputedGotoImplKind;
static JValue Execute(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
ShadowFrame& shadow_frame, JValue result_register)
diff --git a/runtime/interpreter/interpreter_goto_table_impl.cc b/runtime/interpreter/interpreter_goto_table_impl.cc
index 3a91b8c3a6..aa6bcd696f 100644
--- a/runtime/interpreter/interpreter_goto_table_impl.cc
+++ b/runtime/interpreter/interpreter_goto_table_impl.cc
@@ -51,14 +51,8 @@ namespace interpreter {
} \
} while (false)
-#define UPDATE_HANDLER_TABLE() \
- do { \
- if (UNLIKELY(instrumentation->HasDexPcListeners())) { \
- currentHandlersTable = instrumentationHandlersTable; \
- } else { \
- currentHandlersTable = handlersTable; \
- } \
- } while (false);
+#define UPDATE_HANDLER_TABLE() \
+ currentHandlersTable = handlersTable[Runtime::Current()->GetInstrumentation()->GetInterpreterHandlerTable()]
#define UNREACHABLE_CODE_CHECK() \
do { \
@@ -70,10 +64,77 @@ namespace interpreter {
#define HANDLE_INSTRUCTION_START(opcode) op_##opcode: // NOLINT(whitespace/labels)
#define HANDLE_INSTRUCTION_END() UNREACHABLE_CODE_CHECK()
+/**
+ * Interpreter based on computed goto tables.
+ *
+ * Each instruction is associated with a handler. This handler is responsible for executing the
+ * instruction and jumping to the next instruction's handler.
+ * In order to limit the cost of instrumentation, we have two handler tables:
+ * - the "main" handler table: it contains handlers for normal execution of each instruction without
+ * handling of instrumentation.
+ * - the "alternative" handler table: it contains alternative handlers which first handle
+ * instrumentation before jumping to the corresponding "normal" instruction's handler.
+ *
+ * When instrumentation is active, the interpreter uses the "alternative" handler table. Otherwise
+ * it uses the "main" handler table.
+ *
+ * The current handler table is the handler table being used by the interpreter. It is updated:
+ * - on backward branch (goto, if and switch instructions)
+ * - after invoke
+ * - when an exception is thrown.
+ * This makes it possible, for instance, to attach a debugger to an already running application.
+ *
+ * For fast handler table updates, the handler tables are stored in a single array of handler
+ * tables. Each handler table is identified by the InterpreterHandlerTable enum, which maps it to
+ * an index in this array (see Instrumentation::GetInterpreterHandlerTable).
+ *
+ * Here's the current layout of this array of handler tables:
+ *
+ * ---------------------+---------------+
+ * | NOP | (handler for NOP instruction)
+ * +---------------+
+ * "main" | MOVE | (handler for MOVE instruction)
+ * handler table +---------------+
+ * | ... |
+ * +---------------+
+ * | UNUSED_FF | (handler for UNUSED_FF instruction)
+ * ---------------------+---------------+
+ * | NOP | (alternative handler for NOP instruction)
+ * +---------------+
+ * "alternative" | MOVE | (alternative handler for MOVE instruction)
+ * handler table +---------------+
+ * | ... |
+ * +---------------+
+ * | UNUSED_FF | (alternative handler for UNUSED_FF instruction)
+ * ---------------------+---------------+
+ *
+ */
template<bool do_access_check>
JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem* code_item,
ShadowFrame& shadow_frame, JValue result_register) {
- bool do_assignability_check = do_access_check;
+ // Define handler tables:
+ // - The main handler table contains execution handlers for each instruction.
+ // - The alternative handler table contains prelude handlers which check for thread suspend and
+ // manage instrumentation before jumping to the execution handler.
+ static const void* const handlersTable[instrumentation::kNumHandlerTables][kNumPackedOpcodes] = {
+ {
+ // Main handler table.
+#define INSTRUCTION_HANDLER(o, code, n, f, r, i, a, v) &&op_##code,
+#include "dex_instruction_list.h"
+ DEX_INSTRUCTION_LIST(INSTRUCTION_HANDLER)
+#undef DEX_INSTRUCTION_LIST
+#undef INSTRUCTION_HANDLER
+ }, {
+ // Alternative handler table.
+#define INSTRUCTION_HANDLER(o, code, n, f, r, i, a, v) &&alt_op_##code,
+#include "dex_instruction_list.h"
+ DEX_INSTRUCTION_LIST(INSTRUCTION_HANDLER)
+#undef DEX_INSTRUCTION_LIST
+#undef INSTRUCTION_HANDLER
+ }
+ };
+
+ const bool do_assignability_check = do_access_check;
if (UNLIKELY(!shadow_frame.HasReferenceArray())) {
LOG(FATAL) << "Invalid shadow frame for interpreter use";
return JValue();
@@ -81,35 +142,17 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
self->VerifyStack();
uint32_t dex_pc = shadow_frame.GetDexPC();
- const instrumentation::Instrumentation* const instrumentation = Runtime::Current()->GetInstrumentation();
+ const Instruction* inst = Instruction::At(code_item->insns_ + dex_pc);
+ uint16_t inst_data;
+ const void* const* currentHandlersTable;
+ UPDATE_HANDLER_TABLE();
if (LIKELY(dex_pc == 0)) { // We are entering the method as opposed to deoptimizing..
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
instrumentation->MethodEnterEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
shadow_frame.GetMethod(), 0);
}
}
- const Instruction* inst = Instruction::At(code_item->insns_ + dex_pc);
- uint16_t inst_data;
-
- // Define handlers table.
- static const void* handlersTable[kNumPackedOpcodes] = {
-#define INSTRUCTION_HANDLER(o, code, n, f, r, i, a, v) &&op_##code,
-#include "dex_instruction_list.h"
- DEX_INSTRUCTION_LIST(INSTRUCTION_HANDLER)
-#undef DEX_INSTRUCTION_LIST
-#undef INSTRUCTION_HANDLER
- };
-
- static const void* instrumentationHandlersTable[kNumPackedOpcodes] = {
-#define INSTRUCTION_HANDLER(o, code, n, f, r, i, a, v) &&instrumentation_op_##code,
-#include "dex_instruction_list.h"
- DEX_INSTRUCTION_LIST(INSTRUCTION_HANDLER)
-#undef DEX_INSTRUCTION_LIST
-#undef INSTRUCTION_HANDLER
- };
-
- const void** currentHandlersTable;
- UPDATE_HANDLER_TABLE();
// Jump to first instruction.
ADVANCE(0);
@@ -207,6 +250,7 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
}
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
if (UNLIKELY(instrumentation->HasMethodExitListeners())) {
instrumentation->MethodExitEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
shadow_frame.GetMethod(), dex_pc,
@@ -222,6 +266,7 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
}
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
if (UNLIKELY(instrumentation->HasMethodExitListeners())) {
instrumentation->MethodExitEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
shadow_frame.GetMethod(), dex_pc,
@@ -238,6 +283,7 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
}
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
if (UNLIKELY(instrumentation->HasMethodExitListeners())) {
instrumentation->MethodExitEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
shadow_frame.GetMethod(), dex_pc,
@@ -253,6 +299,7 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
}
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
if (UNLIKELY(instrumentation->HasMethodExitListeners())) {
instrumentation->MethodExitEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
shadow_frame.GetMethod(), dex_pc,
@@ -286,6 +333,7 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
HANDLE_PENDING_EXCEPTION();
}
}
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
if (UNLIKELY(instrumentation->HasMethodExitListeners())) {
instrumentation->MethodExitEvent(self, shadow_frame.GetThisObject(code_item->ins_size_),
shadow_frame.GetMethod(), dex_pc,
@@ -547,8 +595,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
}
@@ -559,8 +607,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
}
@@ -571,8 +619,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
}
@@ -583,8 +631,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
}
@@ -595,8 +643,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
}
@@ -688,8 +736,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -704,8 +752,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -720,8 +768,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -736,8 +784,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -752,8 +800,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -768,8 +816,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -784,8 +832,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -800,8 +848,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -816,8 +864,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -832,8 +880,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -848,8 +896,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -864,8 +912,8 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
if (IsBackwardBranch(offset)) {
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
- UPDATE_HANDLER_TABLE();
}
ADVANCE(offset);
} else {
@@ -2306,8 +2354,10 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
CHECK(self->IsExceptionPending());
if (UNLIKELY(self->TestAllFlags())) {
CheckSuspend(self);
+ UPDATE_HANDLER_TABLE();
}
Object* this_object = shadow_frame.GetThisObject(code_item->ins_size_);
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
uint32_t found_dex_pc = FindNextInstructionFollowingException(self, shadow_frame, dex_pc,
this_object,
instrumentation);
@@ -2320,11 +2370,15 @@ JValue ExecuteGotoImpl(Thread* self, MethodHelper& mh, const DexFile::CodeItem*
}
// Create alternative instruction handlers dedicated to instrumentation.
-#define INSTRUMENTATION_INSTRUCTION_HANDLER(o, code, n, f, r, i, a, v) \
- instrumentation_op_##code: { \
- instrumentation->DexPcMovedEvent(self, shadow_frame.GetThisObject(code_item->ins_size_), \
- shadow_frame.GetMethod(), dex_pc); \
- goto *handlersTable[Instruction::code]; \
+#define INSTRUMENTATION_INSTRUCTION_HANDLER(o, code, n, f, r, i, a, v) \
+ alt_op_##code: { \
+ instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation(); \
+ if (UNLIKELY(instrumentation->HasDexPcListeners())) { \
+ instrumentation->DexPcMovedEvent(self, shadow_frame.GetThisObject(code_item->ins_size_), \
+ shadow_frame.GetMethod(), dex_pc); \
+ } \
+ UPDATE_HANDLER_TABLE(); \
+ goto *handlersTable[instrumentation::kMainHandlerTable][Instruction::code]; \
}
#include "dex_instruction_list.h"
DEX_INSTRUCTION_LIST(INSTRUMENTATION_INSTRUCTION_HANDLER)
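
The comment block added above describes a two-level handler table for the computed-goto interpreter. The toy dispatcher below shows the same structure using the GCC/Clang labels-as-values extension: a [table][opcode] array, a main table of plain handlers, and an alternative table whose handlers do the instrumentation work (here just a trace line) before jumping to the main handler for the same opcode. Everything in it — opcodes, handlers, the tracing flag — is illustrative, not ART code.

    #include <cstdio>

    enum Opcode { kPush = 0, kAdd = 1, kPrint = 2, kHalt = 3, kNumOpcodes = 4 };
    enum Table { kMain = 0, kAlternative = 1, kNumTables = 2 };

    // Flip this to route every instruction through the alternative table.
    static bool tracing_enabled = true;

    static int Interpret(const int* code) {
      static const void* const handlers[kNumTables][kNumOpcodes] = {
        // Main table: plain execution handlers.
        { &&op_push, &&op_add, &&op_print, &&op_halt },
        // Alternative table: trace, then jump to the main handler.
        { &&alt_push, &&alt_add, &&alt_print, &&alt_halt },
      };
      const void* const* current = handlers[tracing_enabled ? kAlternative : kMain];
      int stack[16];
      int sp = 0;
      int pc = 0;

    #define DISPATCH() goto *current[code[pc]]
      DISPATCH();

    op_push:  stack[sp++] = code[pc + 1]; pc += 2; DISPATCH();
    op_add:   sp--; stack[sp - 1] += stack[sp]; pc += 1; DISPATCH();
    op_print: std::printf("%d\n", stack[sp - 1]); pc += 1; DISPATCH();
    op_halt:  return stack[sp - 1];

      // Alternative handlers: the "instrumentation" is just a trace line here.
    alt_push:  std::printf("trace pc=%d push\n", pc);  goto *handlers[kMain][kPush];
    alt_add:   std::printf("trace pc=%d add\n", pc);   goto *handlers[kMain][kAdd];
    alt_print: std::printf("trace pc=%d print\n", pc); goto *handlers[kMain][kPrint];
    alt_halt:  std::printf("trace pc=%d halt\n", pc);  goto *handlers[kMain][kHalt];
    #undef DISPATCH
    }

    int main() {
      const int program[] = { kPush, 2, kPush, 3, kAdd, kPrint, kHalt };
      return Interpret(program) == 5 ? 0 : 1;
    }

Unlike this toy, the real interpreter refreshes its current table pointer at suspend points (backward branches, invokes, exception delivery) via UPDATE_HANDLER_TABLE, which is what lets instrumentation be turned on for code that is already running.
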
diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc
index 29fc7a40c8..ec717c1bee 100644
--- a/runtime/jni_internal.cc
+++ b/runtime/jni_internal.cc
@@ -2357,9 +2357,9 @@ class JNI {
for (jint i = 0; i < method_count; ++i) {
const char* name = methods[i].name;
const char* sig = methods[i].signature;
-
+ bool is_fast = false;
if (*sig == '!') {
- // TODO: fast jni. it's too noisy to log all these.
+ is_fast = true;
++sig;
}
@@ -2382,7 +2382,7 @@ class JNI {
VLOG(jni) << "[Registering JNI native method " << PrettyMethod(m) << "]";
- m->RegisterNative(soa.Self(), methods[i].fnPtr);
+ m->RegisterNative(soa.Self(), methods[i].fnPtr, is_fast);
}
return JNI_OK;
}
@@ -3107,8 +3107,8 @@ void JavaVMExt::DumpReferenceTables(std::ostream& os) {
}
bool JavaVMExt::LoadNativeLibrary(const std::string& path, ClassLoader* class_loader,
- std::string& detail) {
- detail.clear();
+ std::string* detail) {
+ detail->clear();
// See if we've already loaded this library. If we have, and the class loader
// matches, return successfully without doing anything.
@@ -3126,7 +3126,7 @@ bool JavaVMExt::LoadNativeLibrary(const std::string& path, ClassLoader* class_lo
// The library will be associated with class_loader. The JNI
// spec says we can't load the same library into more than one
// class loader.
- StringAppendF(&detail, "Shared library \"%s\" already opened by "
+ StringAppendF(detail, "Shared library \"%s\" already opened by "
"ClassLoader %p; can't open in ClassLoader %p",
path.c_str(), library->GetClassLoader(), class_loader);
LOG(WARNING) << detail;
@@ -3135,7 +3135,7 @@ bool JavaVMExt::LoadNativeLibrary(const std::string& path, ClassLoader* class_lo
VLOG(jni) << "[Shared library \"" << path << "\" already loaded in "
<< "ClassLoader " << class_loader << "]";
if (!library->CheckOnLoadResult()) {
- StringAppendF(&detail, "JNI_OnLoad failed on a previous attempt "
+ StringAppendF(detail, "JNI_OnLoad failed on a previous attempt "
"to load \"%s\"", path.c_str());
return false;
}
@@ -3162,7 +3162,7 @@ bool JavaVMExt::LoadNativeLibrary(const std::string& path, ClassLoader* class_lo
VLOG(jni) << "[Call to dlopen(\"" << path << "\", RTLD_LAZY) returned " << handle << "]";
if (handle == NULL) {
- detail = dlerror();
+ *detail = dlerror();
LOG(ERROR) << "dlopen(\"" << path << "\", RTLD_LAZY) failed: " << detail;
return false;
}
@@ -3212,9 +3212,9 @@ bool JavaVMExt::LoadNativeLibrary(const std::string& path, ClassLoader* class_lo
self->SetClassLoaderOverride(old_class_loader);
if (version == JNI_ERR) {
- StringAppendF(&detail, "JNI_ERR returned from JNI_OnLoad in \"%s\"", path.c_str());
+ StringAppendF(detail, "JNI_ERR returned from JNI_OnLoad in \"%s\"", path.c_str());
} else if (IsBadJniVersion(version)) {
- StringAppendF(&detail, "Bad JNI version returned from JNI_OnLoad in \"%s\": %d",
+ StringAppendF(detail, "Bad JNI version returned from JNI_OnLoad in \"%s\": %d",
path.c_str(), version);
// It's unwise to call dlclose() here, but we can mark it
// as bad and ensure that future load attempts will fail.
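
As the RegisterNatives change above shows, a leading '!' in the JNI signature string is what requests the fast path: it is stripped from the signature and forwarded as the is_fast flag to ArtMethod::RegisterNative, which sets kAccFastNative. A minimal registration sketch follows; the class and method names are made up for illustration, and because a fast native method keeps its thread runnable, it should only be used for short, non-blocking work.

    #include <jni.h>

    // A short, non-blocking native body - the kind of method fast JNI targets.
    static jint JNICALL NativeAddOne(JNIEnv*, jclass, jint x) {
      return x + 1;
    }

    static const JNINativeMethod kMethods[] = {
      // The '!' prefix requests fast JNI; the actual descriptor is "(I)I".
      { const_cast<char*>("addOne"), const_cast<char*>("!(I)I"),
        reinterpret_cast<void*>(NativeAddOne) },
    };

    extern "C" JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void*) {
      JNIEnv* env = nullptr;
      if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
        return JNI_ERR;
      }
      // "com/example/FastJniDemo" is a hypothetical class declaring
      // 'static native int addOne(int);'.
      jclass klass = env->FindClass("com/example/FastJniDemo");
      if (klass == nullptr || env->RegisterNatives(klass, kMethods, 1) != JNI_OK) {
        return JNI_ERR;
      }
      return JNI_VERSION_1_6;
    }
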
diff --git a/runtime/jni_internal.h b/runtime/jni_internal.h
index c73ed48014..888d5e5458 100644
--- a/runtime/jni_internal.h
+++ b/runtime/jni_internal.h
@@ -73,7 +73,7 @@ class JavaVMExt : public JavaVM {
* human-readable description of the error.
*/
bool LoadNativeLibrary(const std::string& path, mirror::ClassLoader* class_loader,
- std::string& detail)
+ std::string* detail)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
/**
diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc
index cd05f41cc2..f5c0e9f216 100644
--- a/runtime/mirror/art_method.cc
+++ b/runtime/mirror/art_method.cc
@@ -306,11 +306,15 @@ bool ArtMethod::IsRegistered() const {
}
extern "C" void art_work_around_app_jni_bugs(JNIEnv*, jobject);
-void ArtMethod::RegisterNative(Thread* self, const void* native_method) {
+void ArtMethod::RegisterNative(Thread* self, const void* native_method, bool is_fast) {
DCHECK(Thread::Current() == self);
CHECK(IsNative()) << PrettyMethod(this);
+ CHECK(!IsFastNative()) << PrettyMethod(this);
CHECK(native_method != NULL) << PrettyMethod(this);
if (!self->GetJniEnv()->vm->work_around_app_jni_bugs) {
+ if (is_fast) {
+ SetAccessFlags(GetAccessFlags() | kAccFastNative);
+ }
SetNativeMethod(native_method);
} else {
// We've been asked to associate this method with the given native method but are working
@@ -328,9 +332,9 @@ void ArtMethod::RegisterNative(Thread* self, const void* native_method) {
}
void ArtMethod::UnregisterNative(Thread* self) {
- CHECK(IsNative()) << PrettyMethod(this);
+ CHECK(IsNative() && !IsFastNative()) << PrettyMethod(this);
// restore stub to lookup native pointer via dlsym
- RegisterNative(self, GetJniDlsymLookupStub());
+ RegisterNative(self, GetJniDlsymLookupStub(), false);
}
void ArtMethod::SetNativeMethod(const void* native_method) {
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
index 5d4a6ea0d5..052089373d 100644
--- a/runtime/mirror/art_method.h
+++ b/runtime/mirror/art_method.h
@@ -112,6 +112,10 @@ class MANAGED ArtMethod : public Object {
return (GetAccessFlags() & kAccNative) != 0;
}
+ bool IsFastNative() const {
+ return (GetAccessFlags() & kAccFastNative) != 0;
+ }
+
bool IsAbstract() const {
return (GetAccessFlags() & kAccAbstract) != 0;
}
@@ -307,7 +311,7 @@ class MANAGED ArtMethod : public Object {
bool IsRegistered() const;
- void RegisterNative(Thread* self, const void* native_method)
+ void RegisterNative(Thread* self, const void* native_method, bool is_fast)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 2b0b1e1755..319ca4a5f9 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -661,7 +661,9 @@ static void SetPreverifiedFlagOnMethods(mirror::ObjectArray<mirror::ArtMethod>*
for (int32_t index = 0, end = methods->GetLength(); index < end; ++index) {
mirror::ArtMethod* method = methods->GetWithoutChecks(index);
DCHECK(method != NULL);
- method->SetPreverified();
+ if (!method->IsNative() && !method->IsAbstract()) {
+ method->SetPreverified();
+ }
}
}
}
diff --git a/runtime/modifiers.h b/runtime/modifiers.h
index 34f4af845b..4e365be8e0 100644
--- a/runtime/modifiers.h
+++ b/runtime/modifiers.h
@@ -46,6 +46,7 @@ static const uint32_t kAccConstructor = 0x00010000; // method (dex only) <init>
static const uint32_t kAccDeclaredSynchronized = 0x00020000; // method (dex only)
static const uint32_t kAccClassIsProxy = 0x00040000; // class (dex only)
static const uint32_t kAccPreverified = 0x00080000; // method (dex only)
+static const uint32_t kAccFastNative = 0x0080000; // method (dex only)
// Special runtime-only flags.
// Note: if only kAccClassIsReference is set, we have a soft reference.
diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc
index 823013a950..4e17b795a5 100644
--- a/runtime/native/dalvik_system_DexFile.cc
+++ b/runtime/native/dalvik_system_DexFile.cc
@@ -95,7 +95,6 @@ static jint DexFile_openDexFileNative(JNIEnv* env, jclass, jstring javaSourceNam
return 0;
}
ScopedObjectAccess soa(env);
-
uint32_t dex_location_checksum;
if (!DexFile::GetChecksum(dex_location, &dex_location_checksum)) {
LOG(WARNING) << "Failed to compute checksum: " << dex_location;
@@ -123,9 +122,10 @@ static jint DexFile_openDexFileNative(JNIEnv* env, jclass, jstring javaSourceNam
return static_cast<jint>(reinterpret_cast<uintptr_t>(dex_file));
}
-static const DexFile* toDexFile(int dex_file_address) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+static const DexFile* toDexFile(int dex_file_address, JNIEnv* env) {
const DexFile* dex_file = reinterpret_cast<const DexFile*>(static_cast<uintptr_t>(dex_file_address));
- if (dex_file == NULL) {
+ if (UNLIKELY(dex_file == nullptr)) {
+ ScopedObjectAccess soa(env);
ThrowNullPointerException(NULL, "dex_file == null");
}
return dex_file;
@@ -133,11 +133,8 @@ static const DexFile* toDexFile(int dex_file_address) SHARED_LOCKS_REQUIRED(Lock
static void DexFile_closeDexFile(JNIEnv* env, jclass, jint cookie) {
const DexFile* dex_file;
- {
- ScopedObjectAccess soa(env);
- dex_file = toDexFile(cookie);
- }
- if (dex_file == NULL) {
+ dex_file = toDexFile(cookie, env);
+ if (dex_file == nullptr) {
return;
}
if (Runtime::Current()->GetClassLinker()->IsDexFileRegistered(*dex_file)) {
@@ -148,8 +145,7 @@ static void DexFile_closeDexFile(JNIEnv* env, jclass, jint cookie) {
static jclass DexFile_defineClassNative(JNIEnv* env, jclass, jstring javaName, jobject javaLoader,
jint cookie) {
- ScopedObjectAccess soa(env);
- const DexFile* dex_file = toDexFile(cookie);
+ const DexFile* dex_file = toDexFile(cookie, env);
if (dex_file == NULL) {
VLOG(class_linker) << "Failed to find dex_file";
return NULL;
@@ -165,6 +161,7 @@ static jclass DexFile_defineClassNative(JNIEnv* env, jclass, jstring javaName, j
VLOG(class_linker) << "Failed to find dex_class_def";
return NULL;
}
+ ScopedObjectAccess soa(env);
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
class_linker->RegisterDexFile(*dex_file);
mirror::ClassLoader* class_loader = soa.Decode<mirror::ClassLoader*>(javaLoader);
@@ -176,12 +173,9 @@ static jclass DexFile_defineClassNative(JNIEnv* env, jclass, jstring javaName, j
static jobjectArray DexFile_getClassNameList(JNIEnv* env, jclass, jint cookie) {
const DexFile* dex_file;
- {
- ScopedObjectAccess soa(env);
- dex_file = toDexFile(cookie);
- }
- if (dex_file == NULL) {
- return NULL;
+ dex_file = toDexFile(cookie, env);
+ if (dex_file == nullptr) {
+ return nullptr;
}
std::vector<std::string> class_names;
diff --git a/runtime/native/dalvik_system_VMRuntime.cc b/runtime/native/dalvik_system_VMRuntime.cc
index dad6eff354..486328cbba 100644
--- a/runtime/native/dalvik_system_VMRuntime.cc
+++ b/runtime/native/dalvik_system_VMRuntime.cc
@@ -20,14 +20,18 @@
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
+#include "gc/accounting/card_table-inl.h"
#include "gc/allocator/dlmalloc.h"
#include "gc/heap.h"
#include "gc/space/dlmalloc_space.h"
+#include "intern_table.h"
#include "jni_internal.h"
+#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
-#include "mirror/object.h"
+#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "object_utils.h"
+#include "scoped_fast_native_object_access.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"
@@ -49,8 +53,11 @@ static void VMRuntime_startJitCompilation(JNIEnv*, jobject) {
static void VMRuntime_disableJitCompilation(JNIEnv*, jobject) {
}
-static jobject VMRuntime_newNonMovableArray(JNIEnv* env, jobject, jclass javaElementClass, jint length) {
- ScopedObjectAccess soa(env);
+static jobject VMRuntime_newNonMovableArray(JNIEnv* env,
+ jobject,
+ jclass javaElementClass,
+ jint length) {
+ ScopedFastNativeObjectAccess soa(env);
#ifdef MOVING_GARBAGE_COLLECTOR
// TODO: right now, we don't have a copying collector, so there's no need
// to do anything special here, but we ought to pass the non-movability
@@ -81,7 +88,7 @@ static jlong VMRuntime_addressOf(JNIEnv* env, jobject, jobject javaArray) {
if (javaArray == NULL) { // Most likely allocation failed
return 0;
}
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Array* array = soa.Decode<mirror::Array*>(javaArray);
if (!array->IsArrayInstance()) {
ThrowIllegalArgumentException(NULL, "not an array");
@@ -147,21 +154,21 @@ static void VMRuntime_setTargetSdkVersion(JNIEnv* env, jobject, jint targetSdkVe
}
static void VMRuntime_registerNativeAllocation(JNIEnv* env, jobject, jint bytes) {
- ScopedObjectAccess soa(env);
- if (bytes < 0) {
+ if (UNLIKELY(bytes < 0)) {
+ ScopedObjectAccess soa(env);
ThrowRuntimeException("allocation size negative %d", bytes);
return;
}
- Runtime::Current()->GetHeap()->RegisterNativeAllocation(bytes);
+ Runtime::Current()->GetHeap()->RegisterNativeAllocation(env, bytes);
}
static void VMRuntime_registerNativeFree(JNIEnv* env, jobject, jint bytes) {
- ScopedObjectAccess soa(env);
- if (bytes < 0) {
+ if (UNLIKELY(bytes < 0)) {
+ ScopedObjectAccess soa(env);
ThrowRuntimeException("allocation size negative %d", bytes);
return;
}
- Runtime::Current()->GetHeap()->RegisterNativeFree(bytes);
+ Runtime::Current()->GetHeap()->RegisterNativeFree(env, bytes);
}
static void VMRuntime_trimHeap(JNIEnv*, jobject) {
@@ -169,7 +176,8 @@ static void VMRuntime_trimHeap(JNIEnv*, jobject) {
// Trim the managed heap.
gc::Heap* heap = Runtime::Current()->GetHeap();
- float managed_utilization = static_cast<float>(heap->GetBytesAllocated()) / heap->GetTotalMemory();
+ float managed_utilization = (static_cast<float>(heap->GetBytesAllocated()) /
+ heap->GetTotalMemory());
size_t managed_reclaimed = heap->Trim();
uint64_t gc_heap_end_ns = NanoTime();
@@ -189,12 +197,318 @@ static void VMRuntime_trimHeap(JNIEnv*, jobject) {
}
static void VMRuntime_concurrentGC(JNIEnv* env, jobject) {
- Thread* self = static_cast<JNIEnvExt*>(env)->self;
+ Thread* self = ThreadForEnv(env);
Runtime::Current()->GetHeap()->ConcurrentGC(self);
}
+typedef std::map<std::string, mirror::String*> StringTable;
+
+static mirror::Object* PreloadDexCachesStringsVisitor(mirror::Object* root, void* arg) {
+ StringTable& table = *reinterpret_cast<StringTable*>(arg);
+ mirror::String* string = const_cast<mirror::Object*>(root)->AsString();
+ // LOG(INFO) << "VMRuntime.preloadDexCaches interned=" << string->ToModifiedUtf8();
+ table[string->ToModifiedUtf8()] = string;
+ return root;
+}
+
+// Based on ClassLinker::ResolveString.
+static void PreloadDexCachesResolveString(mirror::DexCache* dex_cache,
+ uint32_t string_idx,
+ StringTable& strings)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::String* string = dex_cache->GetResolvedString(string_idx);
+ if (string != NULL) {
+ return;
+ }
+ const DexFile* dex_file = dex_cache->GetDexFile();
+ uint32_t utf16Size;
+ const char* utf8 = dex_file->StringDataAndLengthByIdx(string_idx, &utf16Size);
+ string = strings[utf8];
+ if (string == NULL) {
+ return;
+ }
+ // LOG(INFO) << "VMRuntime.preloadDexCaches resolved string=" << utf8;
+ dex_cache->SetResolvedString(string_idx, string);
+}
+
+// Based on ClassLinker::ResolveType.
+static void PreloadDexCachesResolveType(mirror::DexCache* dex_cache, uint32_t type_idx)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::Class* klass = dex_cache->GetResolvedType(type_idx);
+ if (klass != NULL) {
+ return;
+ }
+ const DexFile* dex_file = dex_cache->GetDexFile();
+ const char* class_name = dex_file->StringByTypeIdx(type_idx);
+ ClassLinker* linker = Runtime::Current()->GetClassLinker();
+ if (class_name[1] == '\0') {
+ klass = linker->FindPrimitiveClass(class_name[0]);
+ } else {
+ klass = linker->LookupClass(class_name, NULL);
+ }
+ if (klass == NULL) {
+ return;
+ }
+ // LOG(INFO) << "VMRuntime.preloadDexCaches resolved klass=" << class_name;
+ dex_cache->SetResolvedType(type_idx, klass);
+  // Skip uninitialized classes because a filled static storage entry implies the class is initialized.
+ if (!klass->IsInitialized()) {
+ // LOG(INFO) << "VMRuntime.preloadDexCaches uninitialized klass=" << class_name;
+ return;
+ }
+ // LOG(INFO) << "VMRuntime.preloadDexCaches static storage klass=" << class_name;
+ dex_cache->GetInitializedStaticStorage()->Set(type_idx, klass);
+}
+
+// Based on ClassLinker::ResolveField.
+static void PreloadDexCachesResolveField(mirror::DexCache* dex_cache,
+ uint32_t field_idx,
+ bool is_static)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtField* field = dex_cache->GetResolvedField(field_idx);
+ if (field != NULL) {
+ return;
+ }
+ const DexFile* dex_file = dex_cache->GetDexFile();
+ const DexFile::FieldId& field_id = dex_file->GetFieldId(field_idx);
+ mirror::Class* klass = dex_cache->GetResolvedType(field_id.class_idx_);
+ if (klass == NULL) {
+ return;
+ }
+ if (is_static) {
+ field = klass->FindStaticField(dex_cache, field_idx);
+ } else {
+ field = klass->FindInstanceField(dex_cache, field_idx);
+ }
+ if (field == NULL) {
+ return;
+ }
+ // LOG(INFO) << "VMRuntime.preloadDexCaches resolved field " << PrettyField(field);
+ dex_cache->SetResolvedField(field_idx, field);
+}
+
+// Based on ClassLinker::ResolveMethod.
+static void PreloadDexCachesResolveMethod(mirror::DexCache* dex_cache,
+ uint32_t method_idx,
+ InvokeType invoke_type)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtMethod* method = dex_cache->GetResolvedMethod(method_idx);
+ if (method != NULL) {
+ return;
+ }
+ const DexFile* dex_file = dex_cache->GetDexFile();
+ const DexFile::MethodId& method_id = dex_file->GetMethodId(method_idx);
+ mirror::Class* klass = dex_cache->GetResolvedType(method_id.class_idx_);
+ if (klass == NULL) {
+ return;
+ }
+ switch (invoke_type) {
+ case kDirect:
+ case kStatic:
+ method = klass->FindDirectMethod(dex_cache, method_idx);
+ break;
+ case kInterface:
+ method = klass->FindInterfaceMethod(dex_cache, method_idx);
+ break;
+ case kSuper:
+ case kVirtual:
+ method = klass->FindVirtualMethod(dex_cache, method_idx);
+ break;
+ default:
+ LOG(FATAL) << "Unreachable - invocation type: " << invoke_type;
+ }
+ if (method == NULL) {
+ return;
+ }
+ // LOG(INFO) << "VMRuntime.preloadDexCaches resolved method " << PrettyMethod(method);
+ dex_cache->SetResolvedMethod(method_idx, method);
+}
+
+struct DexCacheStats {
+ uint32_t num_strings;
+ uint32_t num_types;
+ uint32_t num_fields;
+ uint32_t num_methods;
+ uint32_t num_static_storage;
+ DexCacheStats() : num_strings(0),
+ num_types(0),
+ num_fields(0),
+ num_methods(0),
+ num_static_storage(0) {}
+};
+
+static const bool kPreloadDexCachesEnabled = true;
+
+// Disabled because it takes a long time (extra half second) but
+// gives almost no benefit in terms of saving private dirty pages.
+static const bool kPreloadDexCachesStrings = false;
+
+static const bool kPreloadDexCachesTypes = true;
+static const bool kPreloadDexCachesFieldsAndMethods = true;
+
+static const bool kPreloadDexCachesCollectStats = true;
+
+static void PreloadDexCachesStatsTotal(DexCacheStats* total) {
+ if (!kPreloadDexCachesCollectStats) {
+ return;
+ }
+
+ ClassLinker* linker = Runtime::Current()->GetClassLinker();
+ const std::vector<const DexFile*>& boot_class_path = linker->GetBootClassPath();
+  for (size_t i = 0; i < boot_class_path.size(); i++) {
+ const DexFile* dex_file = boot_class_path[i];
+ CHECK(dex_file != NULL);
+ total->num_strings += dex_file->NumStringIds();
+ total->num_fields += dex_file->NumFieldIds();
+ total->num_methods += dex_file->NumMethodIds();
+ total->num_types += dex_file->NumTypeIds();
+ total->num_static_storage += dex_file->NumTypeIds();
+ }
+}
+
+static void PreloadDexCachesStatsFilled(DexCacheStats* filled)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ if (!kPreloadDexCachesCollectStats) {
+ return;
+ }
+ ClassLinker* linker = Runtime::Current()->GetClassLinker();
+ const std::vector<const DexFile*>& boot_class_path = linker->GetBootClassPath();
+  for (size_t i = 0; i < boot_class_path.size(); i++) {
+ const DexFile* dex_file = boot_class_path[i];
+ CHECK(dex_file != NULL);
+ mirror::DexCache* dex_cache = linker->FindDexCache(*dex_file);
+ for (size_t i = 0; i < dex_cache->NumStrings(); i++) {
+ mirror::String* string = dex_cache->GetResolvedString(i);
+ if (string != NULL) {
+ filled->num_strings++;
+ }
+ }
+ for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
+ mirror::Class* klass = dex_cache->GetResolvedType(i);
+ if (klass != NULL) {
+ filled->num_types++;
+ }
+ }
+ for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
+ mirror::ArtField* field = dex_cache->GetResolvedField(i);
+ if (field != NULL) {
+ filled->num_fields++;
+ }
+ }
+ for (size_t i = 0; i < dex_cache->NumResolvedMethods(); i++) {
+ mirror::ArtMethod* method = dex_cache->GetResolvedMethod(i);
+ if (method != NULL) {
+ filled->num_methods++;
+ }
+ }
+ for (size_t i = 0; i < dex_cache->NumInitializedStaticStorage(); i++) {
+ mirror::StaticStorageBase* klass = dex_cache->GetInitializedStaticStorage()->Get(i);
+ if (klass != NULL) {
+ filled->num_static_storage++;
+ }
+ }
+ }
+}
+
+// TODO: http://b/11309598 This code was ported over based on the
+// Dalvik version. However, ART has similar code in other places such
+// as the CompilerDriver. This code could probably be refactored to
+// serve both uses.
+static void VMRuntime_preloadDexCaches(JNIEnv* env, jobject) {
+ if (!kPreloadDexCachesEnabled) {
+ return;
+ }
+
+ ScopedObjectAccess soa(env);
+
+ DexCacheStats total;
+ DexCacheStats before;
+ if (kPreloadDexCachesCollectStats) {
+ LOG(INFO) << "VMRuntime.preloadDexCaches starting";
+ PreloadDexCachesStatsTotal(&total);
+ PreloadDexCachesStatsFilled(&before);
+ }
+
+ Runtime* runtime = Runtime::Current();
+ ClassLinker* linker = runtime->GetClassLinker();
+
+  // We use a std::map to avoid heap allocating StringObjects to look up in gDvm.literalStrings
+ StringTable strings;
+ if (kPreloadDexCachesStrings) {
+ runtime->GetInternTable()->VisitRoots(PreloadDexCachesStringsVisitor, &strings, false, false);
+ }
+
+ const std::vector<const DexFile*>& boot_class_path = linker->GetBootClassPath();
+  for (size_t i = 0; i < boot_class_path.size(); i++) {
+ const DexFile* dex_file = boot_class_path[i];
+ CHECK(dex_file != NULL);
+ mirror::DexCache* dex_cache = linker->FindDexCache(*dex_file);
+
+ if (kPreloadDexCachesStrings) {
+ for (size_t i = 0; i < dex_cache->NumStrings(); i++) {
+ PreloadDexCachesResolveString(dex_cache, i, strings);
+ }
+ }
+
+ if (kPreloadDexCachesTypes) {
+ for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
+ PreloadDexCachesResolveType(dex_cache, i);
+ }
+ }
+
+ if (kPreloadDexCachesFieldsAndMethods) {
+ for (size_t class_def_index = 0;
+ class_def_index < dex_file->NumClassDefs();
+ class_def_index++) {
+ const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);
+ const byte* class_data = dex_file->GetClassData(class_def);
+ if (class_data == NULL) {
+ continue;
+ }
+ ClassDataItemIterator it(*dex_file, class_data);
+ for (; it.HasNextStaticField(); it.Next()) {
+ uint32_t field_idx = it.GetMemberIndex();
+ PreloadDexCachesResolveField(dex_cache, field_idx, true);
+ }
+ for (; it.HasNextInstanceField(); it.Next()) {
+ uint32_t field_idx = it.GetMemberIndex();
+ PreloadDexCachesResolveField(dex_cache, field_idx, false);
+ }
+ for (; it.HasNextDirectMethod(); it.Next()) {
+ uint32_t method_idx = it.GetMemberIndex();
+ InvokeType invoke_type = it.GetMethodInvokeType(class_def);
+ PreloadDexCachesResolveMethod(dex_cache, method_idx, invoke_type);
+ }
+ for (; it.HasNextVirtualMethod(); it.Next()) {
+ uint32_t method_idx = it.GetMemberIndex();
+ InvokeType invoke_type = it.GetMethodInvokeType(class_def);
+ PreloadDexCachesResolveMethod(dex_cache, method_idx, invoke_type);
+ }
+ }
+ }
+ }
+
+ if (kPreloadDexCachesCollectStats) {
+ DexCacheStats after;
+ PreloadDexCachesStatsFilled(&after);
+ LOG(INFO) << StringPrintf("VMRuntime.preloadDexCaches strings total=%d before=%d after=%d",
+ total.num_strings, before.num_strings, after.num_strings);
+ LOG(INFO) << StringPrintf("VMRuntime.preloadDexCaches types total=%d before=%d after=%d",
+ total.num_types, before.num_types, after.num_types);
+ LOG(INFO) << StringPrintf("VMRuntime.preloadDexCaches fields total=%d before=%d after=%d",
+ total.num_fields, before.num_fields, after.num_fields);
+ LOG(INFO) << StringPrintf("VMRuntime.preloadDexCaches methods total=%d before=%d after=%d",
+ total.num_methods, before.num_methods, after.num_methods);
+ LOG(INFO) << StringPrintf("VMRuntime.preloadDexCaches storage total=%d before=%d after=%d",
+ total.num_static_storage,
+ before.num_static_storage,
+ after.num_static_storage);
+ LOG(INFO) << StringPrintf("VMRuntime.preloadDexCaches finished");
+ }
+}
+
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(VMRuntime, addressOf, "(Ljava/lang/Object;)J"),
+ NATIVE_METHOD(VMRuntime, addressOf, "!(Ljava/lang/Object;)J"),
NATIVE_METHOD(VMRuntime, bootClassPath, "()Ljava/lang/String;"),
NATIVE_METHOD(VMRuntime, classPath, "()Ljava/lang/String;"),
NATIVE_METHOD(VMRuntime, clearGrowthLimit, "()V"),
@@ -203,7 +517,7 @@ static JNINativeMethod gMethods[] = {
NATIVE_METHOD(VMRuntime, getTargetHeapUtilization, "()F"),
NATIVE_METHOD(VMRuntime, isDebuggerActive, "()Z"),
NATIVE_METHOD(VMRuntime, nativeSetTargetHeapUtilization, "(F)V"),
- NATIVE_METHOD(VMRuntime, newNonMovableArray, "(Ljava/lang/Class;I)Ljava/lang/Object;"),
+ NATIVE_METHOD(VMRuntime, newNonMovableArray, "!(Ljava/lang/Class;I)Ljava/lang/Object;"),
NATIVE_METHOD(VMRuntime, properties, "()[Ljava/lang/String;"),
NATIVE_METHOD(VMRuntime, setTargetSdkVersion, "(I)V"),
NATIVE_METHOD(VMRuntime, registerNativeAllocation, "(I)V"),
@@ -212,6 +526,7 @@ static JNINativeMethod gMethods[] = {
NATIVE_METHOD(VMRuntime, trimHeap, "()V"),
NATIVE_METHOD(VMRuntime, vmVersion, "()Ljava/lang/String;"),
NATIVE_METHOD(VMRuntime, vmLibrary, "()Ljava/lang/String;"),
+ NATIVE_METHOD(VMRuntime, preloadDexCaches, "()V"),
};
void register_dalvik_system_VMRuntime(JNIEnv* env) {
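Note on the registration table above: the "!" prefix added to the JNI signature strings (for example "!(Ljava/lang/Object;)J" for addressOf) is how this patch marks a method as "fast native", which is what ScopedFastNativeObjectAccess later DCHECKs via IsFastNative(). The sketch below only illustrates how such a prefix is typically consumed at registration time; the real handling lives in runtime/jni_internal.cc and runtime/modifiers.h (both touched by this change), and the helper and flag names here are assumptions, not the actual ART identifiers.

// Illustrative sketch, not ART's implementation: strip a leading '!' from the
// registered signature and remember that the method should be flagged fast-native.
static const char* StripFastNativeMarker(const char* sig, bool* is_fast_native) {
  if (sig[0] == '!') {
    *is_fast_native = true;  // e.g. OR a hypothetical kAccFastNative bit into the method's access flags.
    return sig + 1;          // The JNI layer still sees an ordinary "(...)..." signature.
  }
  *is_fast_native = false;
  return sig;
}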
diff --git a/runtime/native/dalvik_system_VMStack.cc b/runtime/native/dalvik_system_VMStack.cc
index 5508270bc2..f91536544a 100644
--- a/runtime/native/dalvik_system_VMStack.cc
+++ b/runtime/native/dalvik_system_VMStack.cc
@@ -20,6 +20,7 @@
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/object-inl.h"
+#include "scoped_fast_native_object_access.h"
#include "scoped_thread_state_change.h"
#include "thread_list.h"
@@ -66,7 +67,7 @@ static jint VMStack_fillStackTraceElements(JNIEnv* env, jclass, jobject javaThre
// Returns the defining class loader of the caller's caller.
static jobject VMStack_getCallingClassLoader(JNIEnv* env, jclass) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
NthCallerVisitor visitor(soa.Self(), 2);
visitor.WalkStack();
return soa.AddLocalReference<jobject>(visitor.caller->GetDeclaringClass()->GetClassLoader());
@@ -93,7 +94,7 @@ static jobject VMStack_getClosestUserClassLoader(JNIEnv* env, jclass, jobject ja
mirror::Object* system;
mirror::Object* class_loader;
};
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* bootstrap = soa.Decode<mirror::Object*>(javaBootstrap);
mirror::Object* system = soa.Decode<mirror::Object*>(javaSystem);
ClosestUserClassLoaderVisitor visitor(soa.Self(), bootstrap, system);
@@ -103,7 +104,7 @@ static jobject VMStack_getClosestUserClassLoader(JNIEnv* env, jclass, jobject ja
// Returns the class of the caller's caller's caller.
static jclass VMStack_getStackClass2(JNIEnv* env, jclass) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
NthCallerVisitor visitor(soa.Self(), 3);
visitor.WalkStack();
return soa.AddLocalReference<jclass>(visitor.caller->GetDeclaringClass());
@@ -119,9 +120,9 @@ static jobjectArray VMStack_getThreadStackTrace(JNIEnv* env, jclass, jobject jav
static JNINativeMethod gMethods[] = {
NATIVE_METHOD(VMStack, fillStackTraceElements, "(Ljava/lang/Thread;[Ljava/lang/StackTraceElement;)I"),
- NATIVE_METHOD(VMStack, getCallingClassLoader, "()Ljava/lang/ClassLoader;"),
- NATIVE_METHOD(VMStack, getClosestUserClassLoader, "(Ljava/lang/ClassLoader;Ljava/lang/ClassLoader;)Ljava/lang/ClassLoader;"),
- NATIVE_METHOD(VMStack, getStackClass2, "()Ljava/lang/Class;"),
+ NATIVE_METHOD(VMStack, getCallingClassLoader, "!()Ljava/lang/ClassLoader;"),
+ NATIVE_METHOD(VMStack, getClosestUserClassLoader, "!(Ljava/lang/ClassLoader;Ljava/lang/ClassLoader;)Ljava/lang/ClassLoader;"),
+ NATIVE_METHOD(VMStack, getStackClass2, "!()Ljava/lang/Class;"),
NATIVE_METHOD(VMStack, getThreadStackTrace, "(Ljava/lang/Thread;)[Ljava/lang/StackTraceElement;"),
};
diff --git a/runtime/native/java_lang_Class.cc b/runtime/native/java_lang_Class.cc
index d3011cb013..3591611185 100644
--- a/runtime/native/java_lang_Class.cc
+++ b/runtime/native/java_lang_Class.cc
@@ -24,13 +24,14 @@
#include "mirror/proxy.h"
#include "object_utils.h"
#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
#include "ScopedLocalRef.h"
#include "ScopedUtfChars.h"
#include "well_known_classes.h"
namespace art {
-static mirror::Class* DecodeClass(const ScopedObjectAccess& soa, jobject java_class)
+static mirror::Class* DecodeClass(const ScopedFastNativeObjectAccess& soa, jobject java_class)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
mirror::Class* c = soa.Decode<mirror::Class*>(java_class);
DCHECK(c != NULL);
@@ -79,13 +80,13 @@ static jclass Class_classForName(JNIEnv* env, jclass, jstring javaName, jboolean
}
static jstring Class_getNameNative(JNIEnv* env, jobject javaThis) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Class* c = DecodeClass(soa, javaThis);
return soa.AddLocalReference<jstring>(c->ComputeName());
}
static jobjectArray Class_getProxyInterfaces(JNIEnv* env, jobject javaThis) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::SynthesizedProxyClass* c =
down_cast<mirror::SynthesizedProxyClass*>(DecodeClass(soa, javaThis));
return soa.AddLocalReference<jobjectArray>(c->GetInterfaces()->Clone(soa.Self()));
@@ -93,8 +94,8 @@ static jobjectArray Class_getProxyInterfaces(JNIEnv* env, jobject javaThis) {
static JNINativeMethod gMethods[] = {
NATIVE_METHOD(Class, classForName, "(Ljava/lang/String;ZLjava/lang/ClassLoader;)Ljava/lang/Class;"),
- NATIVE_METHOD(Class, getNameNative, "()Ljava/lang/String;"),
- NATIVE_METHOD(Class, getProxyInterfaces, "()[Ljava/lang/Class;"),
+ NATIVE_METHOD(Class, getNameNative, "!()Ljava/lang/String;"),
+ NATIVE_METHOD(Class, getProxyInterfaces, "!()[Ljava/lang/Class;"),
};
void register_java_lang_Class(JNIEnv* env) {
diff --git a/runtime/native/java_lang_DexCache.cc b/runtime/native/java_lang_DexCache.cc
index 9b83206c37..51cd5b80d5 100644
--- a/runtime/native/java_lang_DexCache.cc
+++ b/runtime/native/java_lang_DexCache.cc
@@ -17,13 +17,13 @@
#include "dex_file.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
#include "well_known_classes.h"
namespace art {
static jobject DexCache_getDexNative(JNIEnv* env, jobject javaDexCache) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::DexCache* dex_cache = soa.Decode<mirror::DexCache*>(javaDexCache);
// Should only be called while holding the lock on the dex cache.
DCHECK_EQ(dex_cache->GetLockOwnerThreadId(), soa.Self()->GetThreadId());
@@ -46,7 +46,7 @@ static jobject DexCache_getDexNative(JNIEnv* env, jobject javaDexCache) {
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(DexCache, getDexNative, "()Lcom/android/dex/Dex;"),
+ NATIVE_METHOD(DexCache, getDexNative, "!()Lcom/android/dex/Dex;"),
};
void register_java_lang_DexCache(JNIEnv* env) {
diff --git a/runtime/native/java_lang_Object.cc b/runtime/native/java_lang_Object.cc
index 5db7a330a8..4768f48d9c 100644
--- a/runtime/native/java_lang_Object.cc
+++ b/runtime/native/java_lang_Object.cc
@@ -16,7 +16,7 @@
#include "jni_internal.h"
#include "mirror/object-inl.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
// TODO: better support for overloading.
#undef NATIVE_METHOD
@@ -26,41 +26,41 @@
namespace art {
static jobject Object_internalClone(JNIEnv* env, jobject java_this) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* o = soa.Decode<mirror::Object*>(java_this);
return soa.AddLocalReference<jobject>(o->Clone(soa.Self()));
}
static void Object_notify(JNIEnv* env, jobject java_this) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* o = soa.Decode<mirror::Object*>(java_this);
o->Notify(soa.Self());
}
static void Object_notifyAll(JNIEnv* env, jobject java_this) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* o = soa.Decode<mirror::Object*>(java_this);
o->NotifyAll(soa.Self());
}
static void Object_wait(JNIEnv* env, jobject java_this) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* o = soa.Decode<mirror::Object*>(java_this);
o->Wait(soa.Self());
}
static void Object_waitJI(JNIEnv* env, jobject java_this, jlong ms, jint ns) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* o = soa.Decode<mirror::Object*>(java_this);
o->Wait(soa.Self(), ms, ns);
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(Object, internalClone, "()Ljava/lang/Object;", internalClone),
- NATIVE_METHOD(Object, notify, "()V", notify),
- NATIVE_METHOD(Object, notifyAll, "()V", notifyAll),
- NATIVE_METHOD(Object, wait, "()V", wait),
- NATIVE_METHOD(Object, wait, "(JI)V", waitJI),
+ NATIVE_METHOD(Object, internalClone, "!()Ljava/lang/Object;", internalClone),
+ NATIVE_METHOD(Object, notify, "!()V", notify),
+ NATIVE_METHOD(Object, notifyAll, "!()V", notifyAll),
+ NATIVE_METHOD(Object, wait, "!()V", wait),
+ NATIVE_METHOD(Object, wait, "!(JI)V", waitJI),
};
void register_java_lang_Object(JNIEnv* env) {
diff --git a/runtime/native/java_lang_Runtime.cc b/runtime/native/java_lang_Runtime.cc
index 55575cf5a2..e969fcf503 100644
--- a/runtime/native/java_lang_Runtime.cc
+++ b/runtime/native/java_lang_Runtime.cc
@@ -41,7 +41,6 @@ static void Runtime_nativeExit(JNIEnv*, jclass, jint status) {
}
static jstring Runtime_nativeLoad(JNIEnv* env, jclass, jstring javaFilename, jobject javaLoader, jstring javaLdLibraryPath) {
- ScopedObjectAccess soa(env);
ScopedUtfChars filename(env, javaFilename);
if (filename.c_str() == NULL) {
return NULL;
@@ -62,12 +61,15 @@ static jstring Runtime_nativeLoad(JNIEnv* env, jclass, jstring javaFilename, job
}
}
- mirror::ClassLoader* classLoader = soa.Decode<mirror::ClassLoader*>(javaLoader);
std::string detail;
- JavaVMExt* vm = Runtime::Current()->GetJavaVM();
- bool success = vm->LoadNativeLibrary(filename.c_str(), classLoader, detail);
- if (success) {
- return NULL;
+ {
+ ScopedObjectAccess soa(env);
+ mirror::ClassLoader* classLoader = soa.Decode<mirror::ClassLoader*>(javaLoader);
+ JavaVMExt* vm = Runtime::Current()->GetJavaVM();
+ bool success = vm->LoadNativeLibrary(filename.c_str(), classLoader, &detail);
+ if (success) {
+ return nullptr;
+ }
}
// Don't let a pending exception from JNI_OnLoad cause a CheckJNI issue with NewStringUTF.
diff --git a/runtime/native/java_lang_String.cc b/runtime/native/java_lang_String.cc
index 3e9c3f36fa..c401d502ff 100644
--- a/runtime/native/java_lang_String.cc
+++ b/runtime/native/java_lang_String.cc
@@ -17,13 +17,14 @@
#include "common_throws.h"
#include "jni_internal.h"
#include "mirror/string.h"
+#include "scoped_fast_native_object_access.h"
#include "scoped_thread_state_change.h"
#include "ScopedLocalRef.h"
namespace art {
static jint String_compareTo(JNIEnv* env, jobject javaThis, jobject javaRhs) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
if (UNLIKELY(javaRhs == NULL)) {
ThrowNullPointerException(NULL, "rhs == null");
return -1;
@@ -33,7 +34,7 @@ static jint String_compareTo(JNIEnv* env, jobject javaThis, jobject javaRhs) {
}
static jint String_fastIndexOf(JNIEnv* env, jobject java_this, jint ch, jint start) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
// This method does not handle supplementary characters. They're dealt with in managed code.
DCHECK_LE(ch, 0xffff);
@@ -42,16 +43,16 @@ static jint String_fastIndexOf(JNIEnv* env, jobject java_this, jint ch, jint sta
}
static jstring String_intern(JNIEnv* env, jobject javaThis) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::String* s = soa.Decode<mirror::String*>(javaThis);
mirror::String* result = s->Intern();
return soa.AddLocalReference<jstring>(result);
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(String, compareTo, "(Ljava/lang/String;)I"),
- NATIVE_METHOD(String, fastIndexOf, "(II)I"),
- NATIVE_METHOD(String, intern, "()Ljava/lang/String;"),
+ NATIVE_METHOD(String, compareTo, "!(Ljava/lang/String;)I"),
+ NATIVE_METHOD(String, fastIndexOf, "!(II)I"),
+ NATIVE_METHOD(String, intern, "!()Ljava/lang/String;"),
};
void register_java_lang_String(JNIEnv* env) {
diff --git a/runtime/native/java_lang_System.cc b/runtime/native/java_lang_System.cc
index 100f5a9b18..6674db2403 100644
--- a/runtime/native/java_lang_System.cc
+++ b/runtime/native/java_lang_System.cc
@@ -22,7 +22,7 @@
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
/*
* We make guarantees about the atomicity of accesses to primitive
@@ -179,7 +179,7 @@ static void ThrowArrayStoreException_NotAnArray(const char* identifier, mirror::
}
static void System_arraycopy(JNIEnv* env, jclass, jobject javaSrc, jint srcPos, jobject javaDst, jint dstPos, jint length) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
// Null pointer checks.
if (UNLIKELY(javaSrc == NULL)) {
@@ -317,7 +317,7 @@ static void System_arraycopy(JNIEnv* env, jclass, jobject javaSrc, jint srcPos,
}
static void System_arraycopyCharUnchecked(JNIEnv* env, jclass, jobject javaSrc, jint srcPos, jobject javaDst, jint dstPos, jint length) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
DCHECK(javaSrc != NULL);
DCHECK(javaDst != NULL);
mirror::Object* srcObject = soa.Decode<mirror::Object*>(javaSrc);
@@ -339,15 +339,15 @@ static void System_arraycopyCharUnchecked(JNIEnv* env, jclass, jobject javaSrc,
}
static jint System_identityHashCode(JNIEnv* env, jclass, jobject javaObject) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* o = soa.Decode<mirror::Object*>(javaObject);
return static_cast<jint>(o->IdentityHashCode());
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(System, arraycopy, "(Ljava/lang/Object;ILjava/lang/Object;II)V"),
- NATIVE_METHOD(System, arraycopyCharUnchecked, "([CI[CII)V"),
- NATIVE_METHOD(System, identityHashCode, "(Ljava/lang/Object;)I"),
+ NATIVE_METHOD(System, arraycopy, "!(Ljava/lang/Object;ILjava/lang/Object;II)V"),
+ NATIVE_METHOD(System, arraycopyCharUnchecked, "!([CI[CII)V"),
+ NATIVE_METHOD(System, identityHashCode, "!(Ljava/lang/Object;)I"),
};
void register_java_lang_System(JNIEnv* env) {
diff --git a/runtime/native/java_lang_Thread.cc b/runtime/native/java_lang_Thread.cc
index a9de086785..5b34cfb224 100644
--- a/runtime/native/java_lang_Thread.cc
+++ b/runtime/native/java_lang_Thread.cc
@@ -19,6 +19,7 @@
#include "jni_internal.h"
#include "monitor.h"
#include "mirror/object.h"
+#include "scoped_fast_native_object_access.h"
#include "scoped_thread_state_change.h"
#include "ScopedUtfChars.h"
#include "thread.h"
@@ -27,7 +28,7 @@
namespace art {
static jobject Thread_currentThread(JNIEnv* env, jclass) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
return soa.AddLocalReference<jobject>(soa.Self()->GetPeer());
}
@@ -150,7 +151,7 @@ static void Thread_nativeSetPriority(JNIEnv* env, jobject java_thread, jint new_
}
static void Thread_sleep(JNIEnv* env, jclass, jobject java_lock, jlong ms, jint ns) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* lock = soa.Decode<mirror::Object*>(java_lock);
Monitor::Wait(Thread::Current(), lock, ms, ns, true, kSleeping);
}
@@ -166,7 +167,7 @@ static void Thread_yield(JNIEnv*, jobject) {
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(Thread, currentThread, "()Ljava/lang/Thread;"),
+ NATIVE_METHOD(Thread, currentThread, "!()Ljava/lang/Thread;"),
NATIVE_METHOD(Thread, interrupted, "()Z"),
NATIVE_METHOD(Thread, isInterrupted, "()Z"),
NATIVE_METHOD(Thread, nativeCreate, "(Ljava/lang/Thread;JZ)V"),
@@ -175,7 +176,7 @@ static JNINativeMethod gMethods[] = {
NATIVE_METHOD(Thread, nativeInterrupt, "()V"),
NATIVE_METHOD(Thread, nativeSetName, "(Ljava/lang/String;)V"),
NATIVE_METHOD(Thread, nativeSetPriority, "(I)V"),
- NATIVE_METHOD(Thread, sleep, "(Ljava/lang/Object;JI)V"),
+ NATIVE_METHOD(Thread, sleep, "!(Ljava/lang/Object;JI)V"),
NATIVE_METHOD(Thread, yield, "()V"),
};
diff --git a/runtime/native/java_lang_reflect_Array.cc b/runtime/native/java_lang_reflect_Array.cc
index 45ec0ad5a2..a2d6b18026 100644
--- a/runtime/native/java_lang_reflect_Array.cc
+++ b/runtime/native/java_lang_reflect_Array.cc
@@ -21,13 +21,13 @@
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "object_utils.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
#include "sirt_ref.h"
namespace art {
static jobject Array_createMultiArray(JNIEnv* env, jclass, jclass javaElementClass, jobject javaDimArray) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
DCHECK(javaElementClass != NULL);
mirror::Class* element_class = soa.Decode<mirror::Class*>(javaElementClass);
DCHECK(element_class->IsClass());
@@ -41,7 +41,7 @@ static jobject Array_createMultiArray(JNIEnv* env, jclass, jclass javaElementCla
}
static jobject Array_createObjectArray(JNIEnv* env, jclass, jclass javaElementClass, jint length) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
DCHECK(javaElementClass != NULL);
mirror::Class* element_class = soa.Decode<mirror::Class*>(javaElementClass);
if (UNLIKELY(length < 0)) {
@@ -63,8 +63,8 @@ static jobject Array_createObjectArray(JNIEnv* env, jclass, jclass javaElementCl
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(Array, createMultiArray, "(Ljava/lang/Class;[I)Ljava/lang/Object;"),
- NATIVE_METHOD(Array, createObjectArray, "(Ljava/lang/Class;I)Ljava/lang/Object;"),
+ NATIVE_METHOD(Array, createMultiArray, "!(Ljava/lang/Class;[I)Ljava/lang/Object;"),
+ NATIVE_METHOD(Array, createObjectArray, "!(Ljava/lang/Class;I)Ljava/lang/Object;"),
};
void register_java_lang_reflect_Array(JNIEnv* env) {
diff --git a/runtime/native/java_lang_reflect_Constructor.cc b/runtime/native/java_lang_reflect_Constructor.cc
index 85556ac16e..aa72755c9d 100644
--- a/runtime/native/java_lang_reflect_Constructor.cc
+++ b/runtime/native/java_lang_reflect_Constructor.cc
@@ -35,6 +35,7 @@ namespace art {
* with an interface, array, or primitive class.
*/
static jobject Constructor_newInstance(JNIEnv* env, jobject javaMethod, jobjectArray javaArgs) {
+ // TODO: ScopedFastNativeObjectAccess
ScopedObjectAccess soa(env);
jobject art_method = soa.Env()->GetObjectField(
javaMethod, WellKnownClasses::java_lang_reflect_AbstractMethod_artMethod);
@@ -68,7 +69,7 @@ static jobject Constructor_newInstance(JNIEnv* env, jobject javaMethod, jobjectA
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(Constructor, newInstance, "([Ljava/lang/Object;)Ljava/lang/Object;"),
+ NATIVE_METHOD(Constructor, newInstance, "!([Ljava/lang/Object;)Ljava/lang/Object;"),
};
void register_java_lang_reflect_Constructor(JNIEnv* env) {
diff --git a/runtime/native/java_lang_reflect_Field.cc b/runtime/native/java_lang_reflect_Field.cc
index 00f89b65ea..4d69a688ac 100644
--- a/runtime/native/java_lang_reflect_Field.cc
+++ b/runtime/native/java_lang_reflect_Field.cc
@@ -23,12 +23,12 @@
#include "mirror/class-inl.h"
#include "object_utils.h"
#include "reflection.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
namespace art {
-static bool GetFieldValue(const ScopedObjectAccess& soa, mirror::Object* o, mirror::ArtField* f,
- JValue& value, bool allow_references)
+static bool GetFieldValue(const ScopedFastNativeObjectAccess& soa, mirror::Object* o,
+ mirror::ArtField* f, JValue& value, bool allow_references)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK_EQ(value.GetJ(), 0LL);
if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(f->GetDeclaringClass(),
@@ -77,8 +77,8 @@ static bool GetFieldValue(const ScopedObjectAccess& soa, mirror::Object* o, mirr
return false;
}
-static bool CheckReceiver(const ScopedObjectAccess& soa, jobject j_rcvr, mirror::ArtField* f,
- mirror::Object*& class_or_rcvr)
+static bool CheckReceiver(const ScopedFastNativeObjectAccess& soa, jobject j_rcvr,
+ mirror::ArtField* f, mirror::Object*& class_or_rcvr)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (f->IsStatic()) {
class_or_rcvr = f->GetDeclaringClass();
@@ -94,7 +94,7 @@ static bool CheckReceiver(const ScopedObjectAccess& soa, jobject j_rcvr, mirror:
}
static jobject Field_get(JNIEnv* env, jobject javaField, jobject javaObj) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::ArtField* f = soa.DecodeField(env->FromReflectedField(javaField));
mirror::Object* o = NULL;
if (!CheckReceiver(soa, javaObj, f, o)) {
@@ -112,7 +112,7 @@ static jobject Field_get(JNIEnv* env, jobject javaField, jobject javaObj) {
static JValue GetPrimitiveField(JNIEnv* env, jobject javaField, jobject javaObj,
char dst_descriptor) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::ArtField* f = soa.DecodeField(env->FromReflectedField(javaField));
mirror::Object* o = NULL;
if (!CheckReceiver(soa, javaObj, f, o)) {
@@ -221,7 +221,7 @@ static void SetFieldValue(mirror::Object* o, mirror::ArtField* f, const JValue&
}
static void Field_set(JNIEnv* env, jobject javaField, jobject javaObj, jobject javaValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::ArtField* f = soa.DecodeField(env->FromReflectedField(javaField));
// Unbox the value, if necessary.
@@ -242,7 +242,7 @@ static void Field_set(JNIEnv* env, jobject javaField, jobject javaObj, jobject j
static void SetPrimitiveField(JNIEnv* env, jobject javaField, jobject javaObj, char src_descriptor,
const JValue& new_value) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::ArtField* f = soa.DecodeField(env->FromReflectedField(javaField));
mirror::Object* o = NULL;
if (!CheckReceiver(soa, javaObj, f, o)) {
@@ -316,24 +316,24 @@ static void Field_setShort(JNIEnv* env, jobject javaField, jobject javaObj, jsho
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(Field, get, "(Ljava/lang/Object;)Ljava/lang/Object;"),
- NATIVE_METHOD(Field, getBoolean, "(Ljava/lang/Object;)Z"),
- NATIVE_METHOD(Field, getByte, "(Ljava/lang/Object;)B"),
- NATIVE_METHOD(Field, getChar, "(Ljava/lang/Object;)C"),
- NATIVE_METHOD(Field, getDouble, "(Ljava/lang/Object;)D"),
- NATIVE_METHOD(Field, getFloat, "(Ljava/lang/Object;)F"),
- NATIVE_METHOD(Field, getInt, "(Ljava/lang/Object;)I"),
- NATIVE_METHOD(Field, getLong, "(Ljava/lang/Object;)J"),
- NATIVE_METHOD(Field, getShort, "(Ljava/lang/Object;)S"),
- NATIVE_METHOD(Field, set, "(Ljava/lang/Object;Ljava/lang/Object;)V"),
- NATIVE_METHOD(Field, setBoolean, "(Ljava/lang/Object;Z)V"),
- NATIVE_METHOD(Field, setByte, "(Ljava/lang/Object;B)V"),
- NATIVE_METHOD(Field, setChar, "(Ljava/lang/Object;C)V"),
- NATIVE_METHOD(Field, setDouble, "(Ljava/lang/Object;D)V"),
- NATIVE_METHOD(Field, setFloat, "(Ljava/lang/Object;F)V"),
- NATIVE_METHOD(Field, setInt, "(Ljava/lang/Object;I)V"),
- NATIVE_METHOD(Field, setLong, "(Ljava/lang/Object;J)V"),
- NATIVE_METHOD(Field, setShort, "(Ljava/lang/Object;S)V"),
+ NATIVE_METHOD(Field, get, "!(Ljava/lang/Object;)Ljava/lang/Object;"),
+ NATIVE_METHOD(Field, getBoolean, "!(Ljava/lang/Object;)Z"),
+ NATIVE_METHOD(Field, getByte, "!(Ljava/lang/Object;)B"),
+ NATIVE_METHOD(Field, getChar, "!(Ljava/lang/Object;)C"),
+ NATIVE_METHOD(Field, getDouble, "!(Ljava/lang/Object;)D"),
+ NATIVE_METHOD(Field, getFloat, "!(Ljava/lang/Object;)F"),
+ NATIVE_METHOD(Field, getInt, "!(Ljava/lang/Object;)I"),
+ NATIVE_METHOD(Field, getLong, "!(Ljava/lang/Object;)J"),
+ NATIVE_METHOD(Field, getShort, "!(Ljava/lang/Object;)S"),
+ NATIVE_METHOD(Field, set, "!(Ljava/lang/Object;Ljava/lang/Object;)V"),
+ NATIVE_METHOD(Field, setBoolean, "!(Ljava/lang/Object;Z)V"),
+ NATIVE_METHOD(Field, setByte, "!(Ljava/lang/Object;B)V"),
+ NATIVE_METHOD(Field, setChar, "!(Ljava/lang/Object;C)V"),
+ NATIVE_METHOD(Field, setDouble, "!(Ljava/lang/Object;D)V"),
+ NATIVE_METHOD(Field, setFloat, "!(Ljava/lang/Object;F)V"),
+ NATIVE_METHOD(Field, setInt, "!(Ljava/lang/Object;I)V"),
+ NATIVE_METHOD(Field, setLong, "!(Ljava/lang/Object;J)V"),
+ NATIVE_METHOD(Field, setShort, "!(Ljava/lang/Object;S)V"),
};
void register_java_lang_reflect_Field(JNIEnv* env) {
diff --git a/runtime/native/org_apache_harmony_dalvik_ddmc_DdmServer.cc b/runtime/native/org_apache_harmony_dalvik_ddmc_DdmServer.cc
index d7cd18dc9c..163ae20628 100644
--- a/runtime/native/org_apache_harmony_dalvik_ddmc_DdmServer.cc
+++ b/runtime/native/org_apache_harmony_dalvik_ddmc_DdmServer.cc
@@ -16,21 +16,21 @@
#include "base/logging.h"
#include "debugger.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
#include "ScopedPrimitiveArray.h"
namespace art {
static void DdmServer_nativeSendChunk(JNIEnv* env, jclass, jint type,
jbyteArray javaData, jint offset, jint length) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
ScopedByteArrayRO data(env, javaData);
DCHECK_LE(offset + length, static_cast<int32_t>(data.size()));
Dbg::DdmSendChunk(type, length, reinterpret_cast<const uint8_t*>(&data[offset]));
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(DdmServer, nativeSendChunk, "(I[BII)V"),
+ NATIVE_METHOD(DdmServer, nativeSendChunk, "!(I[BII)V"),
};
void register_org_apache_harmony_dalvik_ddmc_DdmServer(JNIEnv* env) {
diff --git a/runtime/native/scoped_fast_native_object_access.h b/runtime/native/scoped_fast_native_object_access.h
new file mode 100644
index 0000000000..d941ec31f0
--- /dev/null
+++ b/runtime/native/scoped_fast_native_object_access.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_NATIVE_SCOPED_FAST_NATIVE_OBJECT_ACCESS_H_
+#define ART_RUNTIME_NATIVE_SCOPED_FAST_NATIVE_OBJECT_ACCESS_H_
+
+#include "base/casts.h"
+#include "jni_internal.h"
+#include "thread-inl.h"
+#include "mirror/art_method.h"
+
+namespace art {
+
+// Variant of ScopedObjectAccess that does no runnable transitions. Should only be used by "fast"
+// JNI methods.
+class ScopedFastNativeObjectAccess {
+ public:
+ explicit ScopedFastNativeObjectAccess(JNIEnv* env)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
+ : env_(down_cast<JNIEnvExt*>(env)), self_(ThreadForEnv(env)) {
+ Locks::mutator_lock_->AssertSharedHeld(Self());
+ DCHECK((*Self()->GetManagedStack()->GetTopQuickFrame())->IsFastNative());
+ // Don't work with raw objects in non-runnable states.
+ DCHECK_EQ(Self()->GetState(), kRunnable);
+ }
+
+ ~ScopedFastNativeObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
+ }
+
+ Thread* Self() const {
+ return self_;
+ }
+
+ JNIEnvExt* Env() const {
+ return env_;
+ }
+
+ template<typename T>
+ T Decode(jobject obj) const
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ Locks::mutator_lock_->AssertSharedHeld(Self());
+ // Don't work with raw objects in non-runnable states.
+ DCHECK_EQ(Self()->GetState(), kRunnable);
+ return down_cast<T>(Self()->DecodeJObject(obj));
+ }
+
+ mirror::ArtField* DecodeField(jfieldID fid) const
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ Locks::mutator_lock_->AssertSharedHeld(Self());
+ // Don't work with raw objects in non-runnable states.
+ DCHECK_EQ(Self()->GetState(), kRunnable);
+#ifdef MOVING_GARBAGE_COLLECTOR
+ // TODO: we should make these unique weak globals if Field instances can ever move.
+ UNIMPLEMENTED(WARNING);
+#endif
+ return reinterpret_cast<mirror::ArtField*>(fid);
+ }
+
+  /*
+   * Variant of ScopedObjectAccessUnchecked::AddLocalReference that skips the JNI work-arounds
+   * and CheckJNI support; it should only be used by fast native methods.
+   */
+ template<typename T>
+ T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ Locks::mutator_lock_->AssertSharedHeld(Self());
+ // Don't work with raw objects in non-runnable states.
+ DCHECK_EQ(Self()->GetState(), kRunnable);
+ if (obj == NULL) {
+ return NULL;
+ }
+
+ DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
+
+ IndirectReferenceTable& locals = Env()->locals;
+
+ uint32_t cookie = Env()->local_ref_cookie;
+ IndirectRef ref = locals.Add(cookie, obj);
+
+ return reinterpret_cast<T>(ref);
+ }
+
+ private:
+ JNIEnvExt* const env_;
+ Thread* const self_;
+};
+
+} // namespace art
+
+#endif // ART_RUNTIME_NATIVE_SCOPED_FAST_NATIVE_OBJECT_ACCESS_H_
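For orientation, here is a minimal sketch of how the new helper is meant to be used, mirroring the call sites converted elsewhere in this patch (String_intern, Object_internalClone, and so on). MyClass_identity is a hypothetical native method, it assumes the same headers as those files, and it would have to be registered with the "!" signature prefix so that the IsFastNative() DCHECK in the constructor holds.

// Hypothetical fast native method showing the intended pattern.
static jobject MyClass_identity(JNIEnv* env, jobject java_this) {
  ScopedFastNativeObjectAccess soa(env);  // no thread-state transition; asserts we are already runnable
  mirror::Object* o = soa.Decode<mirror::Object*>(java_this);
  return soa.AddLocalReference<jobject>(o);  // plain local-ref add, without CheckJNI or work-arounds
}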
diff --git a/runtime/native/sun_misc_Unsafe.cc b/runtime/native/sun_misc_Unsafe.cc
index eece81a9e8..2c6d2810b1 100644
--- a/runtime/native/sun_misc_Unsafe.cc
+++ b/runtime/native/sun_misc_Unsafe.cc
@@ -19,12 +19,12 @@
#include "jni_internal.h"
#include "mirror/object.h"
#include "mirror/object-inl.h"
-#include "scoped_thread_state_change.h"
+#include "scoped_fast_native_object_access.h"
namespace art {
static jboolean Unsafe_compareAndSwapInt(JNIEnv* env, jobject, jobject javaObj, jlong offset, jint expectedValue, jint newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
byte* raw_addr = reinterpret_cast<byte*>(obj) + offset;
volatile int32_t* address = reinterpret_cast<volatile int32_t*>(raw_addr);
@@ -34,7 +34,7 @@ static jboolean Unsafe_compareAndSwapInt(JNIEnv* env, jobject, jobject javaObj,
}
static jboolean Unsafe_compareAndSwapLong(JNIEnv* env, jobject, jobject javaObj, jlong offset, jlong expectedValue, jlong newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
byte* raw_addr = reinterpret_cast<byte*>(obj) + offset;
volatile int64_t* address = reinterpret_cast<volatile int64_t*>(raw_addr);
@@ -44,7 +44,7 @@ static jboolean Unsafe_compareAndSwapLong(JNIEnv* env, jobject, jobject javaObj,
}
static jboolean Unsafe_compareAndSwapObject(JNIEnv* env, jobject, jobject javaObj, jlong offset, jobject javaExpectedValue, jobject javaNewValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
mirror::Object* expectedValue = soa.Decode<mirror::Object*>(javaExpectedValue);
mirror::Object* newValue = soa.Decode<mirror::Object*>(javaNewValue);
@@ -60,97 +60,97 @@ static jboolean Unsafe_compareAndSwapObject(JNIEnv* env, jobject, jobject javaOb
}
static jint Unsafe_getInt(JNIEnv* env, jobject, jobject javaObj, jlong offset) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
return obj->GetField32(MemberOffset(offset), false);
}
static jint Unsafe_getIntVolatile(JNIEnv* env, jobject, jobject javaObj, jlong offset) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
return obj->GetField32(MemberOffset(offset), true);
}
static void Unsafe_putInt(JNIEnv* env, jobject, jobject javaObj, jlong offset, jint newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
obj->SetField32(MemberOffset(offset), newValue, false);
}
static void Unsafe_putIntVolatile(JNIEnv* env, jobject, jobject javaObj, jlong offset, jint newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
obj->SetField32(MemberOffset(offset), newValue, true);
}
static void Unsafe_putOrderedInt(JNIEnv* env, jobject, jobject javaObj, jlong offset, jint newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
ANDROID_MEMBAR_STORE();
obj->SetField32(MemberOffset(offset), newValue, false);
}
static jlong Unsafe_getLong(JNIEnv* env, jobject, jobject javaObj, jlong offset) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
return obj->GetField64(MemberOffset(offset), false);
}
static jlong Unsafe_getLongVolatile(JNIEnv* env, jobject, jobject javaObj, jlong offset) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
return obj->GetField64(MemberOffset(offset), true);
}
static void Unsafe_putLong(JNIEnv* env, jobject, jobject javaObj, jlong offset, jlong newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
obj->SetField64(MemberOffset(offset), newValue, false);
}
static void Unsafe_putLongVolatile(JNIEnv* env, jobject, jobject javaObj, jlong offset, jlong newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
obj->SetField64(MemberOffset(offset), newValue, true);
}
static void Unsafe_putOrderedLong(JNIEnv* env, jobject, jobject javaObj, jlong offset, jlong newValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
ANDROID_MEMBAR_STORE();
obj->SetField64(MemberOffset(offset), newValue, false);
}
static jobject Unsafe_getObjectVolatile(JNIEnv* env, jobject, jobject javaObj, jlong offset) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
mirror::Object* value = obj->GetFieldObject<mirror::Object*>(MemberOffset(offset), true);
return soa.AddLocalReference<jobject>(value);
}
static jobject Unsafe_getObject(JNIEnv* env, jobject, jobject javaObj, jlong offset) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
mirror::Object* value = obj->GetFieldObject<mirror::Object*>(MemberOffset(offset), false);
return soa.AddLocalReference<jobject>(value);
}
static void Unsafe_putObject(JNIEnv* env, jobject, jobject javaObj, jlong offset, jobject javaNewValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
mirror::Object* newValue = soa.Decode<mirror::Object*>(javaNewValue);
obj->SetFieldObject(MemberOffset(offset), newValue, false);
}
static void Unsafe_putObjectVolatile(JNIEnv* env, jobject, jobject javaObj, jlong offset, jobject javaNewValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
mirror::Object* newValue = soa.Decode<mirror::Object*>(javaNewValue);
obj->SetFieldObject(MemberOffset(offset), newValue, true);
}
static void Unsafe_putOrderedObject(JNIEnv* env, jobject, jobject javaObj, jlong offset, jobject javaNewValue) {
- ScopedObjectAccess soa(env);
+ ScopedFastNativeObjectAccess soa(env);
mirror::Object* obj = soa.Decode<mirror::Object*>(javaObj);
mirror::Object* newValue = soa.Decode<mirror::Object*>(javaNewValue);
ANDROID_MEMBAR_STORE();
@@ -158,24 +158,24 @@ static void Unsafe_putOrderedObject(JNIEnv* env, jobject, jobject javaObj, jlong
}
static JNINativeMethod gMethods[] = {
- NATIVE_METHOD(Unsafe, compareAndSwapInt, "(Ljava/lang/Object;JII)Z"),
- NATIVE_METHOD(Unsafe, compareAndSwapLong, "(Ljava/lang/Object;JJJ)Z"),
- NATIVE_METHOD(Unsafe, compareAndSwapObject, "(Ljava/lang/Object;JLjava/lang/Object;Ljava/lang/Object;)Z"),
- NATIVE_METHOD(Unsafe, getIntVolatile, "(Ljava/lang/Object;J)I"),
- NATIVE_METHOD(Unsafe, putIntVolatile, "(Ljava/lang/Object;JI)V"),
- NATIVE_METHOD(Unsafe, getLongVolatile, "(Ljava/lang/Object;J)J"),
- NATIVE_METHOD(Unsafe, putLongVolatile, "(Ljava/lang/Object;JJ)V"),
- NATIVE_METHOD(Unsafe, getObjectVolatile, "(Ljava/lang/Object;J)Ljava/lang/Object;"),
- NATIVE_METHOD(Unsafe, putObjectVolatile, "(Ljava/lang/Object;JLjava/lang/Object;)V"),
- NATIVE_METHOD(Unsafe, getInt, "(Ljava/lang/Object;J)I"),
- NATIVE_METHOD(Unsafe, putInt, "(Ljava/lang/Object;JI)V"),
- NATIVE_METHOD(Unsafe, putOrderedInt, "(Ljava/lang/Object;JI)V"),
- NATIVE_METHOD(Unsafe, getLong, "(Ljava/lang/Object;J)J"),
- NATIVE_METHOD(Unsafe, putLong, "(Ljava/lang/Object;JJ)V"),
- NATIVE_METHOD(Unsafe, putOrderedLong, "(Ljava/lang/Object;JJ)V"),
- NATIVE_METHOD(Unsafe, getObject, "(Ljava/lang/Object;J)Ljava/lang/Object;"),
- NATIVE_METHOD(Unsafe, putObject, "(Ljava/lang/Object;JLjava/lang/Object;)V"),
- NATIVE_METHOD(Unsafe, putOrderedObject, "(Ljava/lang/Object;JLjava/lang/Object;)V"),
+ NATIVE_METHOD(Unsafe, compareAndSwapInt, "!(Ljava/lang/Object;JII)Z"),
+ NATIVE_METHOD(Unsafe, compareAndSwapLong, "!(Ljava/lang/Object;JJJ)Z"),
+ NATIVE_METHOD(Unsafe, compareAndSwapObject, "!(Ljava/lang/Object;JLjava/lang/Object;Ljava/lang/Object;)Z"),
+ NATIVE_METHOD(Unsafe, getIntVolatile, "!(Ljava/lang/Object;J)I"),
+ NATIVE_METHOD(Unsafe, putIntVolatile, "!(Ljava/lang/Object;JI)V"),
+ NATIVE_METHOD(Unsafe, getLongVolatile, "!(Ljava/lang/Object;J)J"),
+ NATIVE_METHOD(Unsafe, putLongVolatile, "!(Ljava/lang/Object;JJ)V"),
+ NATIVE_METHOD(Unsafe, getObjectVolatile, "!(Ljava/lang/Object;J)Ljava/lang/Object;"),
+ NATIVE_METHOD(Unsafe, putObjectVolatile, "!(Ljava/lang/Object;JLjava/lang/Object;)V"),
+ NATIVE_METHOD(Unsafe, getInt, "!(Ljava/lang/Object;J)I"),
+ NATIVE_METHOD(Unsafe, putInt, "!(Ljava/lang/Object;JI)V"),
+ NATIVE_METHOD(Unsafe, putOrderedInt, "!(Ljava/lang/Object;JI)V"),
+ NATIVE_METHOD(Unsafe, getLong, "!(Ljava/lang/Object;J)J"),
+ NATIVE_METHOD(Unsafe, putLong, "!(Ljava/lang/Object;JJ)V"),
+ NATIVE_METHOD(Unsafe, putOrderedLong, "!(Ljava/lang/Object;JJ)V"),
+ NATIVE_METHOD(Unsafe, getObject, "!(Ljava/lang/Object;J)Ljava/lang/Object;"),
+ NATIVE_METHOD(Unsafe, putObject, "!(Ljava/lang/Object;JLjava/lang/Object;)V"),
+ NATIVE_METHOD(Unsafe, putOrderedObject, "!(Ljava/lang/Object;JLjava/lang/Object;)V"),
};
void register_sun_misc_Unsafe(JNIEnv* env) {
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index bdedef4cab..f46b794387 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -977,7 +977,7 @@ void Runtime::InitNativeMethods() {
std::string mapped_name(StringPrintf(OS_SHARED_LIB_FORMAT_STR, "javacore"));
std::string reason;
self->TransitionFromSuspendedToRunnable();
- if (!instance_->java_vm_->LoadNativeLibrary(mapped_name, NULL, reason)) {
+ if (!instance_->java_vm_->LoadNativeLibrary(mapped_name, NULL, &reason)) {
LOG(FATAL) << "LoadNativeLibrary failed for \"" << mapped_name << "\": " << reason;
}
self->TransitionFromRunnableToSuspended(kNative);
diff --git a/runtime/runtime.h b/runtime/runtime.h
index dd04ac7302..b6429b646d 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -280,6 +280,7 @@ class Runtime {
}
InternTable* GetInternTable() const {
+ DCHECK(intern_table_ != NULL);
return intern_table_;
}
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index d3f3a88d66..c39cdb2679 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -18,7 +18,6 @@
#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
#include "base/casts.h"
-#include "jni_internal.h"
#include "thread-inl.h"
namespace art {
@@ -122,14 +121,14 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
explicit ScopedObjectAccessUnchecked(JNIEnv* env)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
: ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
- env_(reinterpret_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
+ env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
self_->VerifyStack();
}
explicit ScopedObjectAccessUnchecked(Thread* self)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
: ScopedThreadStateChange(self, kRunnable),
- env_(reinterpret_cast<JNIEnvExt*>(self->GetJniEnv())),
+ env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
vm_(env_ != NULL ? env_->vm : NULL) {
self_->VerifyStack();
}
@@ -137,7 +136,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
// Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
// change into Runnable or acquire a share on the mutator_lock_.
explicit ScopedObjectAccessUnchecked(JavaVM* vm)
- : ScopedThreadStateChange(), env_(NULL), vm_(reinterpret_cast<JavaVMExt*>(vm)) {}
+ : ScopedThreadStateChange(), env_(NULL), vm_(down_cast<JavaVMExt*>(vm)) {}
// Here purely to force inlining.
~ScopedObjectAccessUnchecked() ALWAYS_INLINE {
@@ -162,6 +161,7 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
*/
template<typename T>
T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ Locks::mutator_lock_->AssertSharedHeld(Self());
DCHECK_EQ(thread_state_, kRunnable); // Don't work with raw objects in non-runnable states.
if (obj == NULL) {
return NULL;
@@ -245,11 +245,6 @@ class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
}
private:
- static Thread* ThreadForEnv(JNIEnv* env) {
- JNIEnvExt* full_env(reinterpret_cast<JNIEnvExt*>(env));
- return full_env->self;
- }
-
// The full JNIEnv.
JNIEnvExt* const env_;
// The full JavaVM.
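The reinterpret_cast to down_cast changes above swap an unchecked cast for one that is validated against the class hierarchy (JNIEnvExt and JavaVMExt derive from JNIEnv and JavaVM respectively). Roughly, art::down_cast in base/casts.h behaves like the sketch below; this is a paraphrase written with C++11 type traits, not the actual helper, which achieves the same compile-time check by other means.

// Paraphrased shape of a checked down-cast: a static_cast that only compiles when
// the target type really is (or derives from) the source type.
#include <type_traits>

template <typename To, typename From>  // use like: down_cast<JNIEnvExt*>(env)
inline To down_cast(From* f) {
  static_assert(std::is_base_of<From, typename std::remove_pointer<To>::type>::value,
                "down_cast can only cast within a known hierarchy");
  return static_cast<To>(f);
}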
diff --git a/runtime/thread-inl.h b/runtime/thread-inl.h
index 7d28785f58..84496072b1 100644
--- a/runtime/thread-inl.h
+++ b/runtime/thread-inl.h
@@ -21,11 +21,19 @@
#include <pthread.h>
+#include "base/casts.h"
#include "base/mutex-inl.h"
#include "cutils/atomic-inline.h"
+#include "jni_internal.h"
namespace art {
+// Quickly access the current thread from a JNIEnv.
+static inline Thread* ThreadForEnv(JNIEnv* env) {
+ JNIEnvExt* full_env(down_cast<JNIEnvExt*>(env));
+ return full_env->self;
+}
+
inline Thread* Thread::Current() {
// We rely on Thread::Current returning NULL for a detached thread, so it's not obvious
// that we can replace this with a direct %fs access on x86.
diff --git a/runtime/utils.cc b/runtime/utils.cc
index 1386f86948..e2852a6cfa 100644
--- a/runtime/utils.cc
+++ b/runtime/utils.cc
@@ -1200,7 +1200,7 @@ std::string GetDalvikCacheFilenameOrDie(const std::string& location) {
LOG(FATAL) << "Expected path in location to be absolute: "<< location;
}
std::string cache_file(location, 1); // skip leading slash
- if (!EndsWith(location, ".dex") || !EndsWith(location, ".art")) {
+ if (!EndsWith(location, ".dex") && !EndsWith(location, ".art")) {
cache_file += "/";
cache_file += DexFile::kClassesDex;
}
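The one-character utils.cc change above is a real logic fix: no location can end in both ".dex" and ".art", so the old condition "!EndsWith(location, \".dex\") || !EndsWith(location, \".art\")" was true for every input and "/classes.dex" was appended even to .dex and .art locations. With "&&" the suffix is only added when the location ends in neither, which is exactly what the new boot.art case in utils_test.cc below exercises. A minimal sketch of the corrected predicate follows; the function name is made up for illustration, and EndsWith is the helper already tested in utils_test.cc.

// Hypothetical helper isolating the fixed condition from GetDalvikCacheFilenameOrDie.
// Assumes <string> and the EndsWith() declaration from runtime/utils.h.
static bool NeedsClassesDexSuffix(const std::string& location) {
  // The old code used "||", which no input can make false, so the suffix was always appended.
  return !EndsWith(location, ".dex") && !EndsWith(location, ".art");
}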
diff --git a/runtime/utils_test.cc b/runtime/utils_test.cc
index 2633964b57..b43177b4fd 100644
--- a/runtime/utils_test.cc
+++ b/runtime/utils_test.cc
@@ -335,4 +335,18 @@ TEST_F(UtilsTest, EndsWith) {
EXPECT_FALSE(EndsWith("oo", "foo"));
}
+void CheckGetDalvikCacheFilenameOrDie(const char* in, const char* out) {
+ std::string expected(getenv("ANDROID_DATA"));
+ expected += "/dalvik-cache/";
+ expected += out;
+ EXPECT_STREQ(expected.c_str(), GetDalvikCacheFilenameOrDie(in).c_str());
+}
+
+TEST_F(UtilsTest, GetDalvikCacheFilenameOrDie) {
+ CheckGetDalvikCacheFilenameOrDie("/system/app/Foo.apk", "system@app@Foo.apk@classes.dex");
+ CheckGetDalvikCacheFilenameOrDie("/data/app/foo-1.apk", "data@app@foo-1.apk@classes.dex");
+ CheckGetDalvikCacheFilenameOrDie("/system/framework/core.jar", "system@framework@core.jar@classes.dex");
+ CheckGetDalvikCacheFilenameOrDie("/system/framework/boot.art", "system@framework@boot.art");
+}
+
} // namespace art
diff --git a/test/JniTest/JniTest.java b/test/JniTest/JniTest.java
index 431056ae32..7014ef9334 100644
--- a/test/JniTest/JniTest.java
+++ b/test/JniTest/JniTest.java
@@ -18,7 +18,28 @@ class JniTest {
public static void main(String[] args) {
System.loadLibrary("arttest");
testFindClassOnAttachedNativeThread();
+ testCallStaticVoidMethodOnSubClass();
}
private static native void testFindClassOnAttachedNativeThread();
+
+ private static void testCallStaticVoidMethodOnSubClass() {
+ testCallStaticVoidMethodOnSubClassNative();
+ if (!testCallStaticVoidMethodOnSubClass_SuperClass.executed) {
+ throw new AssertionError();
+ }
+ }
+
+ private static native void testCallStaticVoidMethodOnSubClassNative();
+
+ private static class testCallStaticVoidMethodOnSubClass_SuperClass {
+ private static boolean executed = false;
+ private static void execute() {
+ executed = true;
+ }
+ }
+
+ private static class testCallStaticVoidMethodOnSubClass_SubClass
+ extends testCallStaticVoidMethodOnSubClass_SuperClass {
+ }
}
diff --git a/test/JniTest/jni_test.cc b/test/JniTest/jni_test.cc
index ed69d39d27..72a3309d9d 100644
--- a/test/JniTest/jni_test.cc
+++ b/test/JniTest/jni_test.cc
@@ -54,6 +54,7 @@ static void* testFindClassOnAttachedNativeThread(void*) {
return NULL;
}
+// http://b/10994325
extern "C" JNIEXPORT void JNICALL Java_JniTest_testFindClassOnAttachedNativeThread(JNIEnv*,
jclass) {
pthread_t pthread;
@@ -65,3 +66,18 @@ extern "C" JNIEXPORT void JNICALL Java_JniTest_testFindClassOnAttachedNativeThre
int pthread_join_result = pthread_join(pthread, NULL);
assert(pthread_join_result == 0);
}
+
+// http://b/11243757
+extern "C" JNIEXPORT void JNICALL Java_JniTest_testCallStaticVoidMethodOnSubClassNative(JNIEnv* env,
+ jclass) {
+ jclass super_class = env->FindClass("JniTest$testCallStaticVoidMethodOnSubClass_SuperClass");
+ assert(super_class != NULL);
+
+ jmethodID execute = env->GetStaticMethodID(super_class, "execute", "()V");
+ assert(execute != NULL);
+
+ jclass sub_class = env->FindClass("JniTest$testCallStaticVoidMethodOnSubClass_SubClass");
+ assert(sub_class != NULL);
+
+ env->CallStaticVoidMethod(sub_class, execute);
+}