summaryrefslogtreecommitdiff
path: root/runtime
diff options
context:
space:
mode:
Diffstat (limited to 'runtime')
-rw-r--r--runtime/Android.bp3
-rw-r--r--runtime/arch/arm/quick_entrypoints_arm.S49
-rw-r--r--runtime/arch/arm64/quick_entrypoints_arm64.S80
-rw-r--r--runtime/arch/mips/quick_entrypoints_mips.S263
-rw-r--r--runtime/arch/mips64/quick_entrypoints_mips64.S259
-rw-r--r--runtime/arch/stub_test.cc4
-rw-r--r--runtime/arch/x86/quick_entrypoints_x86.S54
-rw-r--r--runtime/arch/x86_64/quick_entrypoints_x86_64.S45
-rw-r--r--runtime/art_method.cc53
-rw-r--r--runtime/art_method.h1
-rw-r--r--runtime/base/mutex.cc49
-rw-r--r--runtime/base/mutex.h21
-rw-r--r--runtime/cha.cc18
-rw-r--r--runtime/class_linker.cc315
-rw-r--r--runtime/class_linker.h10
-rw-r--r--runtime/class_linker_test.cc35
-rw-r--r--runtime/class_table_test.cc2
-rw-r--r--runtime/common_throws.cc16
-rw-r--r--runtime/common_throws.h10
-rw-r--r--runtime/debugger.cc18
-rw-r--r--runtime/dex2oat_environment_test.h4
-rw-r--r--runtime/dex_file.cc118
-rw-r--r--runtime/dex_file.h160
-rw-r--r--runtime/dex_file_annotations.cc28
-rw-r--r--runtime/dex_file_test.cc26
-rw-r--r--runtime/dex_file_verifier.cc258
-rw-r--r--runtime/dex_file_verifier.h10
-rw-r--r--runtime/dex_file_verifier_test.cc205
-rw-r--r--runtime/dex_instruction.cc32
-rw-r--r--runtime/dex_instruction.h68
-rw-r--r--runtime/dex_instruction_list.h4
-rw-r--r--runtime/entrypoints/entrypoint_utils.cc2
-rw-r--r--runtime/entrypoints/quick/quick_dexcache_entrypoints.cc17
-rw-r--r--runtime/entrypoints/quick/quick_field_entrypoints.cc2
-rw-r--r--runtime/entrypoints/quick/quick_trampoline_entrypoints.cc21
-rw-r--r--runtime/gc/allocation_record.cc2
-rw-r--r--runtime/gc/collector/concurrent_copying.cc58
-rw-r--r--runtime/gc/reference_processor.cc4
-rw-r--r--runtime/gc/reference_queue_test.cc14
-rw-r--r--runtime/gc/space/image_space.cc123
-rw-r--r--runtime/gc/space/image_space.h11
-rw-r--r--runtime/gc/space/image_space_test.cc111
-rw-r--r--runtime/gc/space/space_create_test.cc16
-rw-r--r--runtime/gc/space/space_test.h4
-rw-r--r--runtime/gc/system_weak.h2
-rw-r--r--runtime/handle.h8
-rw-r--r--runtime/imtable_test.cc4
-rw-r--r--runtime/indirect_reference_table_test.cc20
-rw-r--r--runtime/instrumentation.cc9
-rw-r--r--runtime/intern_table_test.cc14
-rw-r--r--runtime/interpreter/interpreter_common.cc336
-rw-r--r--runtime/interpreter/interpreter_common.h12
-rw-r--r--runtime/interpreter/interpreter_switch_impl.cc22
-rw-r--r--runtime/interpreter/unstarted_runtime.cc22
-rw-r--r--runtime/interpreter/unstarted_runtime_test.cc4
-rw-r--r--runtime/java_vm_ext.cc6
-rw-r--r--runtime/jdwp/jdwp.h5
-rw-r--r--runtime/jdwp/jdwp_event.cc37
-rw-r--r--runtime/jdwp/object_registry.cc4
-rw-r--r--runtime/jit/jit_code_cache.cc15
-rw-r--r--runtime/jit/profile_compilation_info.cc32
-rw-r--r--runtime/jit/profile_compilation_info.h5
-rw-r--r--runtime/jni_internal.cc2
-rw-r--r--runtime/jobject_comparator.cc4
-rw-r--r--runtime/mem_map.cc2
-rw-r--r--runtime/method_handles.cc138
-rw-r--r--runtime/method_handles.h6
-rw-r--r--runtime/mirror/array.cc6
-rw-r--r--runtime/mirror/call_site.cc52
-rw-r--r--runtime/mirror/call_site.h64
-rw-r--r--runtime/mirror/class.cc14
-rw-r--r--runtime/mirror/class_ext.cc4
-rw-r--r--runtime/mirror/dex_cache-inl.h46
-rw-r--r--runtime/mirror/dex_cache.cc14
-rw-r--r--runtime/mirror/dex_cache.h51
-rw-r--r--runtime/mirror/dex_cache_test.cc6
-rw-r--r--runtime/mirror/emulated_stack_frame.cc4
-rw-r--r--runtime/mirror/field-inl.h4
-rw-r--r--runtime/mirror/method_handle_impl.cc24
-rw-r--r--runtime/mirror/method_handle_impl.h23
-rw-r--r--runtime/mirror/method_handles_lookup.cc58
-rw-r--r--runtime/mirror/method_handles_lookup.h70
-rw-r--r--runtime/mirror/method_type.cc16
-rw-r--r--runtime/mirror/method_type.h4
-rw-r--r--runtime/mirror/method_type_test.cc2
-rw-r--r--runtime/mirror/object_test.cc10
-rw-r--r--runtime/mirror/string-inl.h9
-rw-r--r--runtime/mirror/string.h4
-rw-r--r--runtime/monitor.cc2
-rw-r--r--runtime/monitor_test.cc4
-rw-r--r--runtime/native/dalvik_system_VMRuntime.cc4
-rw-r--r--runtime/native/java_lang_Class.cc22
-rw-r--r--runtime/native/java_lang_invoke_MethodHandleImpl.cc2
-rw-r--r--runtime/native/java_lang_reflect_Executable.cc6
-rw-r--r--runtime/native/libcore_util_CharsetUtils.cc4
-rw-r--r--runtime/oat.h2
-rw-r--r--runtime/oat_file.cc115
-rw-r--r--runtime/oat_file.h9
-rw-r--r--runtime/oat_file_assistant.cc163
-rw-r--r--runtime/oat_file_assistant.h24
-rw-r--r--runtime/oat_file_assistant_test.cc51
-rw-r--r--runtime/oat_file_manager.cc8
-rw-r--r--runtime/oat_file_test.cc50
-rw-r--r--runtime/object_lock.cc4
-rw-r--r--runtime/openjdkjvmti/ti_class.cc221
-rw-r--r--runtime/openjdkjvmti/ti_class_loader.cc8
-rw-r--r--runtime/openjdkjvmti/ti_redefine.cc160
-rw-r--r--runtime/openjdkjvmti/ti_redefine.h9
-rw-r--r--runtime/openjdkjvmti/ti_threadgroup.cc6
-rw-r--r--runtime/proxy_test.cc20
-rw-r--r--runtime/reference_table_test.cc4
-rw-r--r--runtime/reflection.cc10
-rw-r--r--runtime/runtime.cc2
-rw-r--r--runtime/runtime_callbacks_test.cc2
-rw-r--r--runtime/stack.cc12
-rw-r--r--runtime/stack_map.h21
-rw-r--r--runtime/thread-inl.h29
-rw-r--r--runtime/thread.cc16
-rw-r--r--runtime/thread.h3
-rw-r--r--runtime/thread_list.cc74
-rw-r--r--runtime/thread_list.h4
-rw-r--r--runtime/transaction_test.cc62
-rw-r--r--runtime/utils.h24
-rw-r--r--runtime/utils/dex_cache_arrays_layout-inl.h21
-rw-r--r--runtime/utils/dex_cache_arrays_layout.h14
-rw-r--r--runtime/utils_test.cc19
-rw-r--r--runtime/verifier/method_verifier.cc150
-rw-r--r--runtime/verifier/method_verifier.h5
-rw-r--r--runtime/verifier/verifier_deps.cc8
-rw-r--r--runtime/zip_archive.cc113
-rw-r--r--runtime/zip_archive.h18
131 files changed, 4071 insertions, 1231 deletions
diff --git a/runtime/Android.bp b/runtime/Android.bp
index 9585ba2d8e..d3a81a9add 100644
--- a/runtime/Android.bp
+++ b/runtime/Android.bp
@@ -123,6 +123,7 @@ cc_defaults {
"memory_region.cc",
"method_handles.cc",
"mirror/array.cc",
+ "mirror/call_site.cc",
"mirror/class.cc",
"mirror/class_ext.cc",
"mirror/dex_cache.cc",
@@ -131,6 +132,7 @@ cc_defaults {
"mirror/field.cc",
"mirror/method.cc",
"mirror/method_handle_impl.cc",
+ "mirror/method_handles_lookup.cc",
"mirror/method_type.cc",
"mirror/object.cc",
"mirror/reference.cc",
@@ -546,6 +548,7 @@ art_cc_test {
"gc/reference_queue_test.cc",
"gc/space/dlmalloc_space_static_test.cc",
"gc/space/dlmalloc_space_random_test.cc",
+ "gc/space/image_space_test.cc",
"gc/space/large_object_space_test.cc",
"gc/space/rosalloc_space_static_test.cc",
"gc/space/rosalloc_space_random_test.cc",
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index a443a4060d..cfe8406fbf 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -965,9 +965,27 @@ ENTRY \name
END \name
.endm
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+// Macro for string and type resolution and initialization.
+.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL name, entrypoint
+ .extern \entrypoint
+ENTRY \name
+ SETUP_SAVE_EVERYTHING_FRAME r1 @ save everything in case of GC
+ mov r1, r9 @ pass Thread::Current
+ bl \entrypoint @ (uint32_t index, Thread*)
+ cbz r0, 1f @ If result is null, deliver the OOME.
+ .cfi_remember_state
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_R0
+ bx lr
+ .cfi_restore_state
+1:
+ DELIVER_PENDING_EXCEPTION_FRAME_READY
+END \name
+.endm
+
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
/*
* Called by managed code to resolve a static field and load a non-wide value.
@@ -1066,27 +1084,6 @@ ENTRY art_quick_set64_static
DELIVER_PENDING_EXCEPTION
END art_quick_set64_static
- /*
- * Entry from managed code to resolve a string, this stub will
- * check the dex cache for a matching string (the fast path), and if not found,
- * it will allocate a String and deliver an exception on error.
- * On success the String is returned. R0 holds the string index.
- */
-
-ENTRY art_quick_resolve_string
- SETUP_SAVE_EVERYTHING_FRAME r1 @ save everything in case of GC
- mov r1, r9 @ pass Thread::Current
- bl artResolveStringFromCode @ (uint32_t type_idx, Thread*)
- cbz r0, 1f @ If result is null, deliver the OOME.
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_R0
- bx lr
- .cfi_restore_state
-1:
- DELIVER_PENDING_EXCEPTION_FRAME_READY
-END art_quick_resolve_string
-
-
// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
// Comment out allocators that have arm specific asm.
@@ -2057,7 +2054,9 @@ ENTRY \name
beq .Lret_forwarding_address\name
.Lslow_rb_\name:
- // Save IP: the kSaveEverything entrypoint art_quick_resolve_string makes a tail call here.
+ // Save IP: The kSaveEverything entrypoint art_quick_resolve_string used to
+ // make a tail call here. Currently, it serves only for stack alignment but
+ // we may reintroduce kSaveEverything calls here in the future.
push {r0-r4, r9, ip, lr} @ save return address, core caller-save registers and ip
.cfi_adjust_cfa_offset 32
.cfi_rel_offset r0, 0
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 219d8b447a..bfbe4816ba 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1553,6 +1553,24 @@ ENTRY \name
END \name
.endm
+// Macro for string and type resolution and initialization.
+.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL name, entrypoint
+ .extern \entrypoint
+ENTRY \name
+ SETUP_SAVE_EVERYTHING_FRAME // save everything for stack crawl
+ mov x1, xSELF // pass Thread::Current
+ bl \entrypoint // (int32_t index, Thread* self)
+ cbz w0, 1f // If result is null, deliver the OOME.
+ .cfi_remember_state
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
+ ret // return
+ .cfi_restore_state
+ .cfi_def_cfa_offset FRAME_SIZE_SAVE_EVERYTHING // workaround for clang bug: 31975598
+1:
+ DELIVER_PENDING_EXCEPTION_FRAME_READY
+END \name
+.endm
+
.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
cbz w0, 1f // result zero branch over
ret // return
@@ -1571,10 +1589,11 @@ TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode,
* initializer and deliver the exception on error. On success the static storage base is
* returned.
*/
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
@@ -1604,27 +1623,6 @@ THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCod
THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCompiledCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
- /*
- * Entry from managed code to resolve a string, this stub will
- * check the dex cache for a matching string (the fast path), and if not found,
- * it will allocate a String and deliver an exception on error.
- * On success the String is returned. R0 holds the string index.
- */
-
-ENTRY art_quick_resolve_string
- SETUP_SAVE_EVERYTHING_FRAME // save everything for stack crawl
- mov x1, xSELF // pass Thread::Current
- bl artResolveStringFromCode // (int32_t string_idx, Thread* self)
- cbz w0, 1f // If result is null, deliver the OOME.
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
- ret // return
- .cfi_restore_state
- .cfi_def_cfa_offset FRAME_SIZE_SAVE_EVERYTHING // workaround for clang bug: 31975598
-1:
- DELIVER_PENDING_EXCEPTION_FRAME_READY
-END art_quick_resolve_string
-
// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
// Comment out allocators that have arm64 specific asm.
@@ -2380,13 +2378,6 @@ END art_quick_indexof
ENTRY \name
// Reference is null, no work to do at all.
cbz \wreg, .Lret_rb_\name
- /*
- * Allocate 46 stack slots * 8 = 368 bytes:
- * - 20 slots for core registers X0-X19
- * - 24 slots for floating-point registers D0-D7 and D16-D31
- * - 1 slot for return address register XLR
- * - 1 padding slot for 16-byte stack alignment
- */
// Use wIP0 as temp and check the mark bit of the reference. wIP0 is not used by the compiler.
ldr wIP0, [\xreg, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
tbz wIP0, #LOCK_WORD_MARK_BIT_SHIFT, .Lnot_marked_rb_\name
@@ -2398,10 +2389,15 @@ ENTRY \name
cmp wzr, wIP0, lsr #30
beq .Lret_forwarding_address\name
.Lslow_rb_\name:
- // We must not clobber IP0 since art_quick_resolve_string makes a tail call here and relies on
- // IP0 being restored.
+ /*
+ * Allocate 44 stack slots * 8 = 352 bytes:
+ * - 20 slots for core registers X0-15, X17-X19, LR
+ * - 24 slots for floating-point registers D0-D7 and D16-D31
+ */
+ // We must not clobber IP1 since code emitted for HLoadClass and HLoadString
+ // relies on IP1 being preserved.
// Save all potentially live caller-save core registers.
- SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 368
+ SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 352
SAVE_TWO_REGS x2, x3, 16
SAVE_TWO_REGS x4, x5, 32
SAVE_TWO_REGS x6, x7, 48
@@ -2409,8 +2405,8 @@ ENTRY \name
SAVE_TWO_REGS x10, x11, 80
SAVE_TWO_REGS x12, x13, 96
SAVE_TWO_REGS x14, x15, 112
- SAVE_TWO_REGS x16, x17, 128
- SAVE_TWO_REGS x18, x19, 144
+ SAVE_TWO_REGS x17, x18, 128 // Skip x16, i.e. IP0.
+ SAVE_TWO_REGS x19, xLR, 144 // Save also return address.
// Save all potentially live caller-save floating-point registers.
stp d0, d1, [sp, #160]
stp d2, d3, [sp, #176]
@@ -2424,9 +2420,6 @@ ENTRY \name
stp d26, d27, [sp, #304]
stp d28, d29, [sp, #320]
stp d30, d31, [sp, #336]
- // Save return address.
- // (sp + #352 is a padding slot)
- SAVE_REG xLR, 360
.ifnc \wreg, w0
mov w0, \wreg // Pass arg1 - obj from `wreg`
@@ -2446,8 +2439,8 @@ ENTRY \name
POP_REGS_NE x10, x11, 80, \xreg
POP_REGS_NE x12, x13, 96, \xreg
POP_REGS_NE x14, x15, 112, \xreg
- POP_REGS_NE x16, x17, 128, \xreg
- POP_REGS_NE x18, x19, 144, \xreg
+ POP_REGS_NE x17, x18, 128, \xreg
+ POP_REGS_NE x19, xLR, 144, \xreg // Restore also return address.
// Restore floating-point registers.
ldp d0, d1, [sp, #160]
ldp d2, d3, [sp, #176]
@@ -2461,9 +2454,8 @@ ENTRY \name
ldp d26, d27, [sp, #304]
ldp d28, d29, [sp, #320]
ldp d30, d31, [sp, #336]
- // Restore return address and remove padding.
- RESTORE_REG xLR, 360
- DECREASE_FRAME 368
+ // Remove frame and return.
+ DECREASE_FRAME 352
ret
.Lret_forwarding_address\name:
mvn wIP0, wIP0
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 663cb6c62f..ec8ae85722 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -1576,9 +1576,87 @@ END \name
// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR
+// A hand-written override for:
+// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
+// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
+.macro ART_QUICK_ALLOC_OBJECT_ROSALLOC c_name, cxx_name
+ENTRY \c_name
+ # Fast path rosalloc allocation
+ # a0: type
+ # s1: Thread::Current
+ # -----------------------------
+ # t1: object size
+ # t2: rosalloc run
+ # t3: thread stack top offset
+ # t4: thread stack bottom offset
+ # v0: free list head
+ #
+ # t5, t6 : temps
+ lw $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1) # Check if thread local allocation
+ lw $t4, THREAD_LOCAL_ALLOC_STACK_END_OFFSET($s1) # stack has any room left.
+ bgeu $t3, $t4, .Lslow_path_\c_name
+
+ lw $t1, MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET($a0) # Load object size (t1).
+ li $t5, ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE # Check if size is for a thread local
+ # allocation. Also does the
+ # initialized and finalizable checks.
+ bgtu $t1, $t5, .Lslow_path_\c_name
+
+ # Compute the rosalloc bracket index from the size. Since the size is already aligned we can
+ # combine the two shifts together.
+ srl $t1, $t1, (ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)
+
+ addu $t2, $t1, $s1
+ lw $t2, (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)($t2) # Load rosalloc run (t2).
+
+ # Load the free list head (v0).
+ # NOTE: this will be the return val.
+ lw $v0, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
+ beqz $v0, .Lslow_path_\c_name
+ nop
+
+ # Load the next pointer of the head and update the list head with the next pointer.
+ lw $t5, ROSALLOC_SLOT_NEXT_OFFSET($v0)
+ sw $t5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
+
+ # Store the class pointer in the header. This also overwrites the first pointer. The offsets are
+ # asserted to match.
+
+#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
+#error "Class pointer needs to overwrite next pointer."
+#endif
+
+ POISON_HEAP_REF $a0
+ sw $a0, MIRROR_OBJECT_CLASS_OFFSET($v0)
+
+ # Push the new object onto the thread local allocation stack and increment the thread local
+ # allocation stack top.
+ sw $v0, 0($t3)
+ addiu $t3, $t3, COMPRESSED_REFERENCE_SIZE
+ sw $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1)
+
+ # Decrement the size of the free list.
+ lw $t5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
+ addiu $t5, $t5, -1
+ sw $t5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
+
+ sync # Fence.
+
+ jalr $zero, $ra
+ nop
+
+ .Lslow_path_\c_name:
+ SETUP_SAVE_REFS_ONLY_FRAME
+ la $t9, \cxx_name
+ jalr $t9
+ move $a1, $s1 # Pass self as argument.
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+END \c_name
+.endm
+
+ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc
+ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc
-GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
-GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
@@ -1964,67 +2042,158 @@ ENTRY_NO_GP art_quick_indexof
/* $a0 holds address of "this" */
/* $a1 holds "ch" */
/* $a2 holds "fromIndex" */
- lw $t0, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
- slt $t1, $a2, $zero # if fromIndex < 0
+#if (STRING_COMPRESSION_FEATURE)
+ lw $a3, MIRROR_STRING_COUNT_OFFSET($a0) # 'count' field of this
+#else
+ lw $t0, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
+#endif
+ slt $t1, $a2, $zero # if fromIndex < 0
#if defined(_MIPS_ARCH_MIPS32R6) || defined(_MIPS_ARCH_MIPS64R6)
- seleqz $a2, $a2, $t1 # fromIndex = 0;
+ seleqz $a2, $a2, $t1 # fromIndex = 0;
#else
- movn $a2, $zero, $t1 # fromIndex = 0;
+ movn $a2, $zero, $t1 # fromIndex = 0;
#endif
- subu $t0, $t0, $a2 # this.length() - fromIndex
- blez $t0, 6f # if this.length()-fromIndex <= 0
- li $v0, -1 # return -1;
-
- sll $v0, $a2, 1 # $a0 += $a2 * 2
- addu $a0, $a0, $v0 # " ditto "
- move $v0, $a2 # Set i to fromIndex.
+#if (STRING_COMPRESSION_FEATURE)
+ srl $t0, $a3, 1 # $a3 holds count (with flag) and $t0 holds actual length
+#endif
+ subu $t0, $t0, $a2 # this.length() - fromIndex
+ blez $t0, 6f # if this.length()-fromIndex <= 0
+ li $v0, -1 # return -1;
+
+#if (STRING_COMPRESSION_FEATURE)
+ sll $a3, $a3, 31 # Extract compression flag.
+ beqz $a3, .Lstring_indexof_compressed
+ move $t2, $a0 # Save a copy in $t2 to later compute result (in branch delay slot).
+#endif
+ sll $v0, $a2, 1 # $a0 += $a2 * 2
+ addu $a0, $a0, $v0 # " ditto "
+ move $v0, $a2 # Set i to fromIndex.
1:
- lhu $t3, MIRROR_STRING_VALUE_OFFSET($a0) # if this.charAt(i) == ch
- beq $t3, $a1, 6f # return i;
- addu $a0, $a0, 2 # i++
- subu $t0, $t0, 1 # this.length() - i
- bnez $t0, 1b # while this.length() - i > 0
- addu $v0, $v0, 1 # i++
+ lhu $t3, MIRROR_STRING_VALUE_OFFSET($a0) # if this.charAt(i) == ch
+ beq $t3, $a1, 6f # return i;
+ addu $a0, $a0, 2 # i++
+ subu $t0, $t0, 1 # this.length() - i
+ bnez $t0, 1b # while this.length() - i > 0
+ addu $v0, $v0, 1 # i++
- li $v0, -1 # if this.length() - i <= 0
- # return -1;
+ li $v0, -1 # if this.length() - i <= 0
+ # return -1;
6:
- j $ra
- nop
+ j $ra
+ nop
+
+#if (STRING_COMPRESSION_FEATURE)
+.Lstring_indexof_compressed:
+ addu $a0, $a0, $a2 # $a0 += $a2
+
+.Lstring_indexof_compressed_loop:
+ lbu $t3, MIRROR_STRING_VALUE_OFFSET($a0)
+ beq $t3, $a1, .Lstring_indexof_compressed_matched
+ subu $t0, $t0, 1
+ bgtz $t0, .Lstring_indexof_compressed_loop
+ addu $a0, $a0, 1
+
+.Lstring_indexof_nomatch:
+ jalr $zero, $ra
+ li $v0, -1 # return -1;
+
+.Lstring_indexof_compressed_matched:
+ jalr $zero, $ra
+ subu $v0, $a0, $t2 # return (current - start);
+#endif
END art_quick_indexof
/* java.lang.String.compareTo(String anotherString) */
ENTRY_NO_GP art_quick_string_compareto
/* $a0 holds address of "this" */
/* $a1 holds address of "anotherString" */
- beq $a0, $a1, 9f # this and anotherString are the same object
- move $v0, $zero
+ beq $a0, $a1, .Lstring_compareto_length_diff # this and anotherString are the same object
+ move $a3, $a2 # trick to return 0 (it returns a2 - a3)
+
+#if (STRING_COMPRESSION_FEATURE)
+ lw $t0, MIRROR_STRING_COUNT_OFFSET($a0) # 'count' field of this
+ lw $t1, MIRROR_STRING_COUNT_OFFSET($a1) # 'count' field of anotherString
+ sra $a2, $t0, 1 # this.length()
+ sra $a3, $t1, 1 # anotherString.length()
+#else
+ lw $a2, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
+ lw $a3, MIRROR_STRING_COUNT_OFFSET($a1) # anotherString.length()
+#endif
- lw $a2, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
- lw $a3, MIRROR_STRING_COUNT_OFFSET($a1) # anotherString.length()
- MINu $t2, $a2, $a3
-# $t2 now holds min(this.length(),anotherString.length())
+ MINu $t2, $a2, $a3
+ # $t2 now holds min(this.length(),anotherString.length())
- beqz $t2, 9f # while min(this.length(),anotherString.length())-i != 0
- subu $v0, $a2, $a3 # if $t2==0 return
- # (this.length() - anotherString.length())
-1:
- lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0) # while this.charAt(i) == anotherString.charAt(i)
- lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
- bne $t0, $t1, 9f # if this.charAt(i) != anotherString.charAt(i)
- subu $v0, $t0, $t1 # return (this.charAt(i) - anotherString.charAt(i))
- addiu $a0, $a0, 2 # point at this.charAt(i++)
- subu $t2, $t2, 1 # new value of
- # min(this.length(),anotherString.length())-i
- bnez $t2, 1b
- addiu $a1, $a1, 2 # point at anotherString.charAt(i++)
- subu $v0, $a2, $a3
-
-9:
- j $ra
- nop
+ # while min(this.length(),anotherString.length())-i != 0
+ beqz $t2, .Lstring_compareto_length_diff # if $t2==0
+ nop # return (this.length() - anotherString.length())
+
+#if (STRING_COMPRESSION_FEATURE)
+ # Differ cases:
+ sll $t3, $t0, 31
+ beqz $t3, .Lstring_compareto_this_is_compressed
+ sll $t3, $t1, 31 # In branch delay slot.
+ beqz $t3, .Lstring_compareto_that_is_compressed
+ nop
+ b .Lstring_compareto_both_not_compressed
+ nop
+
+.Lstring_compareto_this_is_compressed:
+ beqz $t3, .Lstring_compareto_both_compressed
+ nop
+ /* If (this->IsCompressed() && that->IsCompressed() == false) */
+.Lstring_compareto_loop_comparison_this_compressed:
+ lbu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
+ lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bne $t0, $t1, .Lstring_compareto_char_diff
+ addiu $a0, $a0, 1 # point at this.charAt(i++) - compressed
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_loop_comparison_this_compressed
+ addiu $a1, $a1, 2 # point at anotherString.charAt(i++) - uncompressed
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+
+.Lstring_compareto_that_is_compressed:
+ lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
+ lbu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bne $t0, $t1, .Lstring_compareto_char_diff
+ addiu $a0, $a0, 2 # point at this.charAt(i++) - uncompressed
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_that_is_compressed
+ addiu $a1, $a1, 1 # point at anotherString.charAt(i++) - compressed
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+
+.Lstring_compareto_both_compressed:
+ lbu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
+ lbu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bne $t0, $t1, .Lstring_compareto_char_diff
+ addiu $a0, $a0, 1 # point at this.charAt(i++) - compressed
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_both_compressed
+ addiu $a1, $a1, 1 # point at anotherString.charAt(i++) - compressed
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+#endif
+
+.Lstring_compareto_both_not_compressed:
+ lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0) # while this.charAt(i) == anotherString.charAt(i)
+ lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bne $t0, $t1, .Lstring_compareto_char_diff # if this.charAt(i) != anotherString.charAt(i)
+ # return (this.charAt(i) - anotherString.charAt(i))
+ addiu $a0, $a0, 2 # point at this.charAt(i++)
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_both_not_compressed
+ addiu $a1, $a1, 2 # point at anotherString.charAt(i++)
+
+.Lstring_compareto_length_diff:
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+
+.Lstring_compareto_char_diff:
+ jalr $zero, $ra
+ subu $v0, $t0, $t1 # return (this.charAt(i) - anotherString.charAt(i))
END art_quick_string_compareto
.extern artInvokePolymorphic
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index 5fee575331..28d7c77938 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1533,8 +1533,85 @@ END \name
// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR
-GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
-GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
+// A hand-written override for:
+// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
+// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
+.macro ART_QUICK_ALLOC_OBJECT_ROSALLOC c_name, cxx_name
+ENTRY \c_name
+ # Fast path rosalloc allocation
+ # a0: type
+ # s1: Thread::Current
+ # -----------------------------
+ # t1: object size
+ # t2: rosalloc run
+ # t3: thread stack top offset
+ # a4: thread stack bottom offset
+ # v0: free list head
+ #
+ # a5, a6 : temps
+ ld $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1) # Check if thread local allocation stack
+ ld $a4, THREAD_LOCAL_ALLOC_STACK_END_OFFSET($s1) # has any room left.
+ bgeuc $t3, $a4, .Lslow_path_\c_name
+
+ lwu $t1, MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET($a0) # Load object size (t1).
+ li $a5, ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE # Check if size is for a thread local
+ # allocation. Also does the initialized
+ # and finalizable checks.
+ bltuc $a5, $t1, .Lslow_path_\c_name
+
+ # Compute the rosalloc bracket index from the size. Since the size is already aligned we can
+ # combine the two shifts together.
+ dsrl $t1, $t1, (ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)
+
+ daddu $t2, $t1, $s1
+ ld $t2, (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)($t2) # Load rosalloc run (t2).
+
+ # Load the free list head (v0).
+ # NOTE: this will be the return val.
+ ld $v0, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
+ beqzc $v0, .Lslow_path_\c_name
+
+ # Load the next pointer of the head and update the list head with the next pointer.
+ ld $a5, ROSALLOC_SLOT_NEXT_OFFSET($v0)
+ sd $a5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
+
+ # Store the class pointer in the header. This also overwrites the first pointer. The offsets are
+ # asserted to match.
+
+#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
+#error "Class pointer needs to overwrite next pointer."
+#endif
+
+ POISON_HEAP_REF $a0
+ sw $a0, MIRROR_OBJECT_CLASS_OFFSET($v0)
+
+ # Push the new object onto the thread local allocation stack and increment the thread local
+ # allocation stack top.
+ sd $v0, 0($t3)
+ daddiu $t3, $t3, COMPRESSED_REFERENCE_SIZE
+ sd $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1)
+
+ # Decrement the size of the free list.
+ lw $a5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
+ addiu $a5, $a5, -1
+ sw $a5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
+
+ sync # Fence.
+
+ jalr $zero, $ra
+ .cpreturn # Restore gp from t8 in branch delay slot.
+
+.Lslow_path_\c_name:
+ SETUP_SAVE_REFS_ONLY_FRAME
+ jal \cxx_name
+ move $a1 ,$s1 # Pass self as argument.
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+END \c_name
+.endm
+
+ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc
+ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc
+
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
@@ -1823,32 +1900,91 @@ END art_quick_deoptimize_from_compiled_code
ENTRY_NO_GP art_quick_string_compareto
/* $a0 holds address of "this" */
/* $a1 holds address of "anotherString" */
- beq $a0,$a1,9f # this and anotherString are the same object
- move $v0,$zero
+ move $a2, $zero
+ beq $a0, $a1, .Lstring_compareto_length_diff # this and anotherString are the same object
+ move $a3, $zero # return 0 (it returns a2 - a3)
+
+#if (STRING_COMPRESSION_FEATURE)
+ lw $a4, MIRROR_STRING_COUNT_OFFSET($a0) # 'count' field of this
+ lw $a5, MIRROR_STRING_COUNT_OFFSET($a1) # 'count' field of anotherString
+ sra $a2, $a4, 1 # this.length()
+ sra $a3, $a5, 1 # anotherString.length()
+#else
+ lw $a2, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
+ lw $a3, MIRROR_STRING_COUNT_OFFSET($a1) # anotherString.length()
+#endif
- lw $a2,MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
- lw $a3,MIRROR_STRING_COUNT_OFFSET($a1) # anotherString.length()
- MINu $t2, $a2, $a3
-# $t2 now holds min(this.length(),anotherString.length())
+ MINu $t2, $a2, $a3
+ # $t2 now holds min(this.length(),anotherString.length())
- beqz $t2,9f # while min(this.length(),anotherString.length())-i != 0
- subu $v0,$a2,$a3 # if $t2==0 return
- # (this.length() - anotherString.length())
-1:
- lhu $t0,MIRROR_STRING_VALUE_OFFSET($a0) # while this.charAt(i) == anotherString.charAt(i)
- lhu $t1,MIRROR_STRING_VALUE_OFFSET($a1)
- bne $t0,$t1,9f # if this.charAt(i) != anotherString.charAt(i)
- subu $v0,$t0,$t1 # return (this.charAt(i) - anotherString.charAt(i))
- daddiu $a0,$a0,2 # point at this.charAt(i++)
- subu $t2,$t2,1 # new value of
- # min(this.length(),anotherString.length())-i
- bnez $t2,1b
- daddiu $a1,$a1,2 # point at anotherString.charAt(i++)
- subu $v0,$a2,$a3
-
-9:
- j $ra
- nop
+ # while min(this.length(),anotherString.length())-i != 0
+ beqzc $t2, .Lstring_compareto_length_diff # if $t2==0
+ # return (this.length() - anotherString.length())
+
+#if (STRING_COMPRESSION_FEATURE)
+ # Differ cases:
+ dext $a6, $a4, 0, 1
+ beqz $a6, .Lstring_compareto_this_is_compressed
+ dext $a6, $a5, 0, 1 # In branch delay slot.
+ beqz $a6, .Lstring_compareto_that_is_compressed
+ nop
+ b .Lstring_compareto_both_not_compressed
+ nop
+
+.Lstring_compareto_this_is_compressed:
+ beqzc $a6, .Lstring_compareto_both_compressed
+ /* If (this->IsCompressed() && that->IsCompressed() == false) */
+.Lstring_compareto_loop_comparison_this_compressed:
+ lbu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
+ lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bnec $t0, $t1, .Lstring_compareto_char_diff
+ daddiu $a0, $a0, 1 # point at this.charAt(i++) - compressed
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_loop_comparison_this_compressed
+ daddiu $a1, $a1, 2 # point at anotherString.charAt(i++) - uncompressed
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+
+.Lstring_compareto_that_is_compressed:
+ lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
+ lbu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bnec $t0, $t1, .Lstring_compareto_char_diff
+ daddiu $a0, $a0, 2 # point at this.charAt(i++) - uncompressed
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_that_is_compressed
+ daddiu $a1, $a1, 1 # point at anotherString.charAt(i++) - compressed
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+
+.Lstring_compareto_both_compressed:
+ lbu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
+ lbu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bnec $t0, $t1, .Lstring_compareto_char_diff
+ daddiu $a0, $a0, 1 # point at this.charAt(i++) - compressed
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_both_compressed
+ daddiu $a1, $a1, 1 # point at anotherString.charAt(i++) - compressed
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+#endif
+
+.Lstring_compareto_both_not_compressed:
+ lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0) # while this.charAt(i) == anotherString.charAt(i)
+ lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
+ bnec $t0, $t1, .Lstring_compareto_char_diff # if this.charAt(i) != anotherString.charAt(i)
+ # return (this.charAt(i) - anotherString.charAt(i))
+ daddiu $a0, $a0, 2 # point at this.charAt(i++)
+ subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
+ bnez $t2, .Lstring_compareto_both_not_compressed
+ daddiu $a1, $a1, 2 # point at anotherString.charAt(i++)
+
+.Lstring_compareto_length_diff:
+ jalr $zero, $ra
+ subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
+
+.Lstring_compareto_char_diff:
+ jalr $zero, $ra
+ subu $v0, $t0, $t1 # return (this.charAt(i) - anotherString.charAt(i))
END art_quick_string_compareto
/* java.lang.String.indexOf(int ch, int fromIndex=0) */
@@ -1856,31 +1992,64 @@ ENTRY_NO_GP art_quick_indexof
/* $a0 holds address of "this" */
/* $a1 holds "ch" */
/* $a2 holds "fromIndex" */
- lw $t0,MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
- slt $at, $a2, $zero # if fromIndex < 0
- seleqz $a2, $a2, $at # fromIndex = 0;
- subu $t0,$t0,$a2 # this.length() - fromIndex
- blez $t0,6f # if this.length()-fromIndex <= 0
- li $v0,-1 # return -1;
+#if (STRING_COMPRESSION_FEATURE)
+ lw $a3, MIRROR_STRING_COUNT_OFFSET($a0) # 'count' field of this
+#else
+ lw $t0, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
+#endif
+ slt $at, $a2, $zero # if fromIndex < 0
+ seleqz $a2, $a2, $at # fromIndex = 0;
+#if (STRING_COMPRESSION_FEATURE)
+ srl $t0, $a3, 1 # $a3 holds count (with flag) and $t0 holds actual length
+#endif
+ subu $t0, $t0, $a2 # this.length() - fromIndex
+ blez $t0, 6f # if this.length()-fromIndex <= 0
+ li $v0, -1 # return -1;
- sll $v0,$a2,1 # $a0 += $a2 * 2
- daddu $a0,$a0,$v0 # " ditto "
- move $v0,$a2 # Set i to fromIndex.
+#if (STRING_COMPRESSION_FEATURE)
+ dext $a3, $a3, 0, 1 # Extract compression flag.
+ beqzc $a3, .Lstring_indexof_compressed
+#endif
+
+ sll $v0, $a2, 1 # $a0 += $a2 * 2
+ daddu $a0, $a0, $v0 # " ditto "
+ move $v0, $a2 # Set i to fromIndex.
1:
- lhu $t3,MIRROR_STRING_VALUE_OFFSET($a0) # if this.charAt(i) == ch
- beq $t3,$a1,6f # return i;
- daddu $a0,$a0,2 # i++
- subu $t0,$t0,1 # this.length() - i
- bnez $t0,1b # while this.length() - i > 0
- addu $v0,$v0,1 # i++
+ lhu $t3, MIRROR_STRING_VALUE_OFFSET($a0) # if this.charAt(i) == ch
+ beq $t3, $a1, 6f # return i;
+ daddu $a0, $a0, 2 # i++
+ subu $t0, $t0, 1 # this.length() - i
+ bnez $t0, 1b # while this.length() - i > 0
+ addu $v0, $v0, 1 # i++
- li $v0,-1 # if this.length() - i <= 0
- # return -1;
+ li $v0, -1 # if this.length() - i <= 0
+ # return -1;
6:
- j $ra
- nop
+ j $ra
+ nop
+
+#if (STRING_COMPRESSION_FEATURE)
+.Lstring_indexof_compressed:
+ move $a4, $a0 # Save a copy in $a4 to later compute result.
+ daddu $a0, $a0, $a2 # $a0 += $a2
+
+.Lstring_indexof_compressed_loop:
+ lbu $t3, MIRROR_STRING_VALUE_OFFSET($a0)
+ beq $t3, $a1, .Lstring_indexof_compressed_matched
+ subu $t0, $t0, 1
+ bgtz $t0, .Lstring_indexof_compressed_loop
+ daddu $a0, $a0, 1
+
+.Lstring_indexof_nomatch:
+ jalr $zero, $ra
+ li $v0, -1 # return -1;
+
+.Lstring_indexof_compressed_matched:
+ jalr $zero, $ra
+ dsubu $v0, $a0, $a4 # return (current - start);
+#endif
END art_quick_indexof
.extern artInvokePolymorphic
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index 0bf08a6d97..207bf9d365 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -984,7 +984,7 @@ TEST_F(StubTest, AllocObject) {
while (length > 10) {
Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
- if (self->IsExceptionPending() || h.Get() == nullptr) {
+ if (self->IsExceptionPending() || h == nullptr) {
self->ClearException();
// Try a smaller length
@@ -1003,7 +1003,7 @@ TEST_F(StubTest, AllocObject) {
// Allocate simple objects till it fails.
while (!self->IsExceptionPending()) {
Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
- if (!self->IsExceptionPending() && h.Get() != nullptr) {
+ if (!self->IsExceptionPending() && h != nullptr) {
handles.push_back(h);
}
}
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 76615e843b..8c907e0790 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -922,6 +922,31 @@ MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
END_FUNCTION VAR(c_name)
END_MACRO
+// Macro for string and type resolution and initialization.
+MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name)
+ DEFINE_FUNCTION VAR(c_name)
+ SETUP_SAVE_EVERYTHING_FRAME ebx, ebx // save ref containing registers for GC
+ // Outgoing argument set up
+ subl MACRO_LITERAL(8), %esp // push padding
+ CFI_ADJUST_CFA_OFFSET(8)
+ pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
+ PUSH eax // pass arg1
+ call CALLVAR(cxx_name) // cxx_name(arg1, Thread*)
+ addl MACRO_LITERAL(16), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-16)
+ testl %eax, %eax // If result is null, deliver the OOME.
+ jz 1f
+ CFI_REMEMBER_STATE
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX // restore frame up to return address
+ ret // return
+ CFI_RESTORE_STATE
+ CFI_DEF_CFA(esp, FRAME_SIZE_SAVE_EVERYTHING) // workaround for clang bug: 31975598
+1:
+ DELIVER_PENDING_EXCEPTION_FRAME_READY
+ END_FUNCTION VAR(c_name)
+END_MACRO
+
MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
testl %eax, %eax // eax == 0 ?
jz 1f // if eax == 0 goto 1
@@ -1245,31 +1270,10 @@ GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFr
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64
-DEFINE_FUNCTION art_quick_resolve_string
- SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
- // Outgoing argument set up
- subl LITERAL(8), %esp // push padding
- CFI_ADJUST_CFA_OFFSET(8)
- pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- CFI_ADJUST_CFA_OFFSET(4)
- PUSH eax // pass arg1
- call SYMBOL(artResolveStringFromCode)
- addl LITERAL(16), %esp // pop arguments
- CFI_ADJUST_CFA_OFFSET(-16)
- testl %eax, %eax // If result is null, deliver the OOME.
- jz 1f
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX
- ret
- CFI_RESTORE_STATE
- CFI_DEF_CFA(esp, FRAME_SIZE_SAVE_EVERYTHING) // workaround for clang bug: 31975598
-1:
- DELIVER_PENDING_EXCEPTION_FRAME_READY
-END_FUNCTION art_quick_resolve_string
-
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index a1ae858735..f1be52eeb6 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -950,6 +950,26 @@ MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
END_FUNCTION VAR(c_name)
END_MACRO
+// Macro for string and type resolution and initialization.
+MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name)
+ DEFINE_FUNCTION VAR(c_name)
+ SETUP_SAVE_EVERYTHING_FRAME // save everything for GC
+ // Outgoing argument set up
+ movl %eax, %edi // pass string index
+ movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
+ call CALLVAR(cxx_name) // cxx_name(arg0, Thread*)
+ testl %eax, %eax // If result is null, deliver the OOME.
+ jz 1f
+ CFI_REMEMBER_STATE
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_RAX // restore frame up to return address
+ ret
+ CFI_RESTORE_STATE
+ CFI_DEF_CFA(rsp, FRAME_SIZE_SAVE_EVERYTHING) // workaround for clang bug: 31975598
+1:
+ DELIVER_PENDING_EXCEPTION_FRAME_READY
+ END_FUNCTION VAR(c_name)
+END_MACRO
+
MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
testq %rax, %rax // rax == 0 ?
jz 1f // if rax == 0 goto 1
@@ -1270,27 +1290,10 @@ DEFINE_FUNCTION art_quick_alloc_object_initialized_region_tlab
ALLOC_OBJECT_TLAB_SLOW_PATH artAllocObjectFromCodeInitializedRegionTLAB
END_FUNCTION art_quick_alloc_object_initialized_region_tlab
-DEFINE_FUNCTION art_quick_resolve_string
- SETUP_SAVE_EVERYTHING_FRAME
- // Outgoing argument set up
- movl %eax, %edi // pass string index
- movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
- call SYMBOL(artResolveStringFromCode) // artResolveStringFromCode(arg0, Thread*)
-
- testl %eax, %eax // If result is null, deliver the OOME.
- jz 1f
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_RAX // restore frame up to return address
- ret
- CFI_RESTORE_STATE
- CFI_DEF_CFA(rsp, FRAME_SIZE_SAVE_EVERYTHING) // workaround for clang bug: 31975598
-1:
- DELIVER_PENDING_EXCEPTION_FRAME_READY
-END_FUNCTION art_quick_resolve_string
-
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index 6cb8544617..9d74e7c92b 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -67,7 +67,6 @@ ArtMethod* ArtMethod::GetNonObsoleteMethod() {
}
ArtMethod* ArtMethod::GetSingleImplementation(PointerSize pointer_size) {
- DCHECK(!IsNative());
if (!IsAbstract()) {
// A non-abstract's single implementation is itself.
return this;
@@ -275,7 +274,7 @@ uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type,
*has_no_move_exception = (first_catch_instr->Opcode() != Instruction::MOVE_EXCEPTION);
}
// Put the exception back.
- if (exception.Get() != nullptr) {
+ if (exception != nullptr) {
self->SetException(exception.Get());
}
return found_dex_pc;
@@ -442,12 +441,56 @@ static uint32_t GetOatMethodIndexFromMethodIndex(const DexFile& dex_file,
UNREACHABLE();
}
+// We use the method's DexFile and declaring class name to find the OatMethod for an obsolete
+// method. This is extremely slow but we need it if we want to be able to have obsolete native
+// methods since we need this to find the size of its stack frames.
+//
+// NB We could (potentially) do this differently and rely on the way the transformation is applied
+// in order to use the entrypoint to find this information. However, for debugging reasons (most
+// notably making sure that new invokes of obsolete methods fail) we choose to instead get the data
+// directly from the dex file.
+static const OatFile::OatMethod FindOatMethodFromDexFileFor(ArtMethod* method, bool* found)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK(method->IsObsolete() && method->IsNative());
+ const DexFile* dex_file = method->GetDexFile();
+
+ // recreate the class_def_index from the descriptor.
+ std::string descriptor_storage;
+ const DexFile::TypeId* declaring_class_type_id =
+ dex_file->FindTypeId(method->GetDeclaringClass()->GetDescriptor(&descriptor_storage));
+ CHECK(declaring_class_type_id != nullptr);
+ dex::TypeIndex declaring_class_type_index = dex_file->GetIndexForTypeId(*declaring_class_type_id);
+ const DexFile::ClassDef* declaring_class_type_def =
+ dex_file->FindClassDef(declaring_class_type_index);
+ CHECK(declaring_class_type_def != nullptr);
+ uint16_t declaring_class_def_index = dex_file->GetIndexForClassDef(*declaring_class_type_def);
+
+ size_t oat_method_index = GetOatMethodIndexFromMethodIndex(*dex_file,
+ declaring_class_def_index,
+ method->GetDexMethodIndex());
+
+ OatFile::OatClass oat_class = OatFile::FindOatClass(*dex_file,
+ declaring_class_def_index,
+ found);
+ if (!(*found)) {
+ return OatFile::OatMethod::Invalid();
+ }
+ return oat_class.GetOatMethod(oat_method_index);
+}
+
static const OatFile::OatMethod FindOatMethodFor(ArtMethod* method,
PointerSize pointer_size,
bool* found)
REQUIRES_SHARED(Locks::mutator_lock_) {
- // We shouldn't be calling this with obsolete methods.
- DCHECK(!method->IsObsolete());
+ if (UNLIKELY(method->IsObsolete())) {
+ // We shouldn't be calling this with obsolete methods except for native obsolete methods for
+ // which we need to use the oat method to figure out how large the quick frame is.
+ DCHECK(method->IsNative()) << "We should only be finding the OatMethod of obsolete methods in "
+ << "order to allow stack walking. Other obsolete methods should "
+ << "never need to access this information.";
+ DCHECK_EQ(pointer_size, kRuntimePointerSize) << "Obsolete method in compiler!";
+ return FindOatMethodFromDexFileFor(method, found);
+ }
// Although we overwrite the trampoline of non-static methods, we may get here via the resolution
// method for direct methods (or virtual methods made direct).
mirror::Class* declaring_class = method->GetDeclaringClass();
@@ -490,7 +533,7 @@ bool ArtMethod::EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> param
const auto& proto_id = dex_file->GetMethodPrototype(method_id);
const DexFile::TypeList* proto_params = dex_file->GetProtoParameters(proto_id);
auto count = proto_params != nullptr ? proto_params->Size() : 0u;
- auto param_len = params.Get() != nullptr ? params->GetLength() : 0u;
+ auto param_len = params != nullptr ? params->GetLength() : 0u;
if (param_len != count) {
return false;
}
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 383630363e..3d51fdde94 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -432,6 +432,7 @@ class ArtMethod FINAL {
}
ProfilingInfo* GetProfilingInfo(PointerSize pointer_size) {
+ DCHECK(!IsNative());
return reinterpret_cast<ProfilingInfo*>(GetDataPtrSize(pointer_size));
}
diff --git a/runtime/base/mutex.cc b/runtime/base/mutex.cc
index 6e102be1a1..7bba944ca8 100644
--- a/runtime/base/mutex.cc
+++ b/runtime/base/mutex.cc
@@ -72,6 +72,7 @@ Uninterruptible Roles::uninterruptible_;
ReaderWriterMutex* Locks::jni_globals_lock_ = nullptr;
Mutex* Locks::jni_weak_globals_lock_ = nullptr;
ReaderWriterMutex* Locks::dex_lock_ = nullptr;
+std::vector<BaseMutex*> Locks::expected_mutexes_on_weak_ref_access_;
struct AllMutexData {
// A guard for all_mutexes_ that's not a mutex (Mutexes must CAS to acquire and busy wait).
@@ -146,7 +147,10 @@ class ScopedContentionRecorder FINAL : public ValueObject {
const uint64_t start_nano_time_;
};
-BaseMutex::BaseMutex(const char* name, LockLevel level) : level_(level), name_(name) {
+BaseMutex::BaseMutex(const char* name, LockLevel level)
+ : level_(level),
+ name_(name),
+ should_respond_to_empty_checkpoint_request_(false) {
if (kLogLockContentions) {
ScopedAllMutexesLock mu(this);
std::set<BaseMutex*>** all_mutexes_ptr = &gAllMutexData->all_mutexes;
@@ -377,6 +381,9 @@ void Mutex::ExclusiveLock(Thread* self) {
// Failed to acquire, hang up.
ScopedContentionRecorder scr(this, SafeGetTid(self), GetExclusiveOwnerTid());
num_contenders_++;
+ if (UNLIKELY(should_respond_to_empty_checkpoint_request_)) {
+ self->CheckEmptyCheckpointFromMutex();
+ }
if (futex(state_.Address(), FUTEX_WAIT, 1, nullptr, nullptr, 0) != 0) {
// EAGAIN and EINTR both indicate a spurious failure, try again from the beginning.
// We don't use TEMP_FAILURE_RETRY so we can intentionally retry to acquire the lock.
@@ -519,6 +526,18 @@ std::ostream& operator<<(std::ostream& os, const Mutex& mu) {
return os;
}
+void Mutex::WakeupToRespondToEmptyCheckpoint() {
+#if ART_USE_FUTEXES
+ // Wake up all the waiters so they will respond to the empty checkpoint.
+ DCHECK(should_respond_to_empty_checkpoint_request_);
+ if (UNLIKELY(num_contenders_.LoadRelaxed() > 0)) {
+ futex(state_.Address(), FUTEX_WAKE, -1, nullptr, nullptr, 0);
+ }
+#else
+ LOG(FATAL) << "Non futex case isn't supported.";
+#endif
+}
+
ReaderWriterMutex::ReaderWriterMutex(const char* name, LockLevel level)
: BaseMutex(name, level)
#if ART_USE_FUTEXES
@@ -563,6 +582,9 @@ void ReaderWriterMutex::ExclusiveLock(Thread* self) {
// Failed to acquire, hang up.
ScopedContentionRecorder scr(this, SafeGetTid(self), GetExclusiveOwnerTid());
++num_pending_writers_;
+ if (UNLIKELY(should_respond_to_empty_checkpoint_request_)) {
+ self->CheckEmptyCheckpointFromMutex();
+ }
if (futex(state_.Address(), FUTEX_WAIT, cur_state, nullptr, nullptr, 0) != 0) {
// EAGAIN and EINTR both indicate a spurious failure, try again from the beginning.
// We don't use TEMP_FAILURE_RETRY so we can intentionally retry to acquire the lock.
@@ -639,6 +661,9 @@ bool ReaderWriterMutex::ExclusiveLockWithTimeout(Thread* self, int64_t ms, int32
}
ScopedContentionRecorder scr(this, SafeGetTid(self), GetExclusiveOwnerTid());
++num_pending_writers_;
+ if (UNLIKELY(should_respond_to_empty_checkpoint_request_)) {
+ self->CheckEmptyCheckpointFromMutex();
+ }
if (futex(state_.Address(), FUTEX_WAIT, cur_state, &rel_ts, nullptr, 0) != 0) {
if (errno == ETIMEDOUT) {
--num_pending_writers_;
@@ -677,6 +702,9 @@ void ReaderWriterMutex::HandleSharedLockContention(Thread* self, int32_t cur_sta
// Owner holds it exclusively, hang up.
ScopedContentionRecorder scr(this, GetExclusiveOwnerTid(), SafeGetTid(self));
++num_pending_readers_;
+ if (UNLIKELY(should_respond_to_empty_checkpoint_request_)) {
+ self->CheckEmptyCheckpointFromMutex();
+ }
if (futex(state_.Address(), FUTEX_WAIT, cur_state, nullptr, nullptr, 0) != 0) {
if (errno != EAGAIN && errno != EINTR) {
PLOG(FATAL) << "futex wait failed for " << name_;
@@ -749,6 +777,19 @@ std::ostream& operator<<(std::ostream& os, const MutatorMutex& mu) {
return os;
}
+void ReaderWriterMutex::WakeupToRespondToEmptyCheckpoint() {
+#if ART_USE_FUTEXES
+ // Wake up all the waiters so they will respond to the empty checkpoint.
+ DCHECK(should_respond_to_empty_checkpoint_request_);
+ if (UNLIKELY(num_pending_readers_.LoadRelaxed() > 0 ||
+ num_pending_writers_.LoadRelaxed() > 0)) {
+ futex(state_.Address(), FUTEX_WAKE, -1, nullptr, nullptr, 0);
+ }
+#else
+ LOG(FATAL) << "Non futex case isn't supported.";
+#endif
+}
+
ConditionVariable::ConditionVariable(const char* name, Mutex& guard)
: name_(name), guard_(guard) {
#if ART_USE_FUTEXES
@@ -1121,6 +1162,12 @@ void Locks::Init() {
#undef UPDATE_CURRENT_LOCK_LEVEL
+ // List of mutexes that we may hold when accessing a weak ref.
+ dex_lock_->SetShouldRespondToEmptyCheckpointRequest(true);
+ expected_mutexes_on_weak_ref_access_.push_back(dex_lock_);
+ classlinker_classes_lock_->SetShouldRespondToEmptyCheckpointRequest(true);
+ expected_mutexes_on_weak_ref_access_.push_back(classlinker_classes_lock_);
+
InitConditions();
}
}
diff --git a/runtime/base/mutex.h b/runtime/base/mutex.h
index ffe18c6a50..9b6938f9bf 100644
--- a/runtime/base/mutex.h
+++ b/runtime/base/mutex.h
@@ -152,6 +152,16 @@ class BaseMutex {
static void DumpAll(std::ostream& os);
+ bool ShouldRespondToEmptyCheckpointRequest() const {
+ return should_respond_to_empty_checkpoint_request_;
+ }
+
+ void SetShouldRespondToEmptyCheckpointRequest(bool value) {
+ should_respond_to_empty_checkpoint_request_ = value;
+ }
+
+ virtual void WakeupToRespondToEmptyCheckpoint() = 0;
+
protected:
friend class ConditionVariable;
@@ -168,6 +178,7 @@ class BaseMutex {
const LockLevel level_; // Support for lock hierarchy.
const char* const name_;
+ bool should_respond_to_empty_checkpoint_request_;
// A log entry that records contention but makes no guarantee that either tid will be held live.
struct ContentionLogEntry {
@@ -266,6 +277,8 @@ class LOCKABLE Mutex : public BaseMutex {
// For negative capabilities in clang annotations.
const Mutex& operator!() const { return *this; }
+ void WakeupToRespondToEmptyCheckpoint() OVERRIDE;
+
private:
#if ART_USE_FUTEXES
// 0 is unheld, 1 is held.
@@ -386,6 +399,8 @@ class SHARED_LOCKABLE ReaderWriterMutex : public BaseMutex {
// For negative capabilities in clang annotations.
const ReaderWriterMutex& operator!() const { return *this; }
+ void WakeupToRespondToEmptyCheckpoint() OVERRIDE;
+
private:
#if ART_USE_FUTEXES
// Out-of-inline path for handling contention for a SharedLock.
@@ -713,6 +728,12 @@ class Locks {
// Have an exclusive logging thread.
static Mutex* logging_lock_ ACQUIRED_AFTER(unexpected_signal_lock_);
+
+ // List of mutexes that we expect a thread may hold when accessing weak refs. This is used to
+ // avoid a deadlock in the empty checkpoint while weak ref access is disabled (b/34964016). If we
+ // encounter an unexpected mutex on accessing weak refs,
+ // Thread::CheckEmptyCheckpointFromWeakRefAccess will detect it.
+ static std::vector<BaseMutex*> expected_mutexes_on_weak_ref_access_;
};
class Roles {
diff --git a/runtime/cha.cc b/runtime/cha.cc
index d11b12f700..eaba01b2ce 100644
--- a/runtime/cha.cc
+++ b/runtime/cha.cc
@@ -200,7 +200,8 @@ void ClassHierarchyAnalysis::VerifyNonSingleImplementation(mirror::Class* verify
if (verify_method != excluded_method) {
DCHECK(!verify_method->HasSingleImplementation())
<< "class: " << verify_class->PrettyClass()
- << " verify_method: " << verify_method->PrettyMethod(true);
+ << " verify_method: " << verify_method->PrettyMethod(true)
+ << " excluded_method: " << excluded_method->PrettyMethod(true);
if (verify_method->IsAbstract()) {
DCHECK(verify_method->GetSingleImplementation(image_pointer_size) == nullptr);
}
@@ -257,9 +258,6 @@ void ClassHierarchyAnalysis::CheckSingleImplementationInfo(
return;
}
- // Native methods don't have single-implementation flag set.
- DCHECK(!method_in_super->IsNative());
-
uint16_t method_index = method_in_super->GetMethodIndex();
if (method_in_super->IsAbstract()) {
if (kIsDebugBuild) {
@@ -374,12 +372,12 @@ void ClassHierarchyAnalysis::InitSingleImplementationFlag(Handle<mirror::Class>
// used for static methods or methods of final classes.
return;
}
- if (method->IsNative()) {
- // Native method's invocation overhead is already high and it
- // cannot be inlined. It's not worthwhile to devirtualize the
- // call which can add a deoptimization point.
- DCHECK(!method->HasSingleImplementation());
- } else if (method->IsAbstract()) {
+ if (method->IsAbstract()) {
+ // single-implementation of abstract method shares the same field
+ // that's used for JNI function of native method. It's fine since a method
+ // cannot be both abstract and native.
+ DCHECK(!method->IsNative()) << "Abstract method cannot be native";
+
if (method->GetDeclaringClass()->IsInstantiable()) {
// Rare case, but we do accept it (such as 800-smali/smali/b_26143249.smali).
// Do not attempt to devirtualize it.
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 7db83688e2..d02cf17d44 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -70,6 +70,7 @@
#include "jni_internal.h"
#include "leb128.h"
#include "linear_alloc.h"
+#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
@@ -82,6 +83,7 @@
#include "mirror/method.h"
#include "mirror/method_type.h"
#include "mirror/method_handle_impl.h"
+#include "mirror/method_handles_lookup.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/proxy.h"
@@ -405,7 +407,7 @@ bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b
auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
Handle<mirror::Class> java_lang_Class(hs.NewHandle(down_cast<mirror::Class*>(
heap->AllocNonMovableObject<true>(self, nullptr, class_class_size, VoidFunctor()))));
- CHECK(java_lang_Class.Get() != nullptr);
+ CHECK(java_lang_Class != nullptr);
mirror::Class::SetClassClass(java_lang_Class.Get());
java_lang_Class->SetClass(java_lang_Class.Get());
if (kUseBakerReadBarrier) {
@@ -425,7 +427,7 @@ bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b
// java_lang_Object comes next so that object_array_class can be created.
Handle<mirror::Class> java_lang_Object(hs.NewHandle(
AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
- CHECK(java_lang_Object.Get() != nullptr);
+ CHECK(java_lang_Object != nullptr);
// backfill Object as the super class of Class.
java_lang_Class->SetSuperClass(java_lang_Object.Get());
mirror::Class::SetStatus(java_lang_Object, mirror::Class::kStatusLoaded, self);
@@ -624,9 +626,9 @@ bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b
// Setup the single, global copy of "iftable".
auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
- CHECK(java_lang_Cloneable.Get() != nullptr);
+ CHECK(java_lang_Cloneable != nullptr);
auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
- CHECK(java_io_Serializable.Get() != nullptr);
+ CHECK(java_io_Serializable != nullptr);
// We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
// crawl up and explicitly list all of the supers as well.
array_iftable_.Read()->SetInterface(0, java_lang_Cloneable.Get());
@@ -695,6 +697,18 @@ bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b
SetClassRoot(kJavaLangInvokeMethodHandleImpl, class_root);
mirror::MethodHandleImpl::SetClass(class_root);
+ // Create java.lang.invoke.MethodHandles.Lookup.class root
+ class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
+ CHECK(class_root != nullptr);
+ SetClassRoot(kJavaLangInvokeMethodHandlesLookup, class_root);
+ mirror::MethodHandlesLookup::SetClass(class_root);
+
+ // Create java.lang.invoke.CallSite.class root
+ class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
+ CHECK(class_root != nullptr);
+ SetClassRoot(kJavaLangInvokeCallSite, class_root);
+ mirror::CallSite::SetClass(class_root);
+
class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
CHECK(class_root != nullptr);
SetClassRoot(kDalvikSystemEmulatedStackFrame, class_root);
@@ -981,6 +995,8 @@ bool ClassLinker::InitFromBootImage(std::string* error_msg) {
mirror::Method::SetArrayClass(GetClassRoot(kJavaLangReflectMethodArrayClass));
mirror::MethodType::SetClass(GetClassRoot(kJavaLangInvokeMethodType));
mirror::MethodHandleImpl::SetClass(GetClassRoot(kJavaLangInvokeMethodHandleImpl));
+ mirror::MethodHandlesLookup::SetClass(GetClassRoot(kJavaLangInvokeMethodHandlesLookup));
+ mirror::CallSite::SetClass(GetClassRoot(kJavaLangInvokeCallSite));
mirror::Reference::SetClass(GetClassRoot(kJavaLangRefReference));
mirror::BooleanArray::SetArrayClass(GetClassRoot(kBooleanArrayClass));
mirror::ByteArray::SetArrayClass(GetClassRoot(kByteArrayClass));
@@ -1231,12 +1247,13 @@ bool ClassLinker::UpdateAppImageClassLoadersAndDexCaches(
if (dex_file->NumProtoIds() < num_method_types) {
num_method_types = dex_file->NumProtoIds();
}
-
+ const size_t num_call_sites = dex_file->NumCallSiteIds();
CHECK_EQ(num_strings, dex_cache->NumStrings());
CHECK_EQ(num_types, dex_cache->NumResolvedTypes());
CHECK_EQ(num_methods, dex_cache->NumResolvedMethods());
CHECK_EQ(num_fields, dex_cache->NumResolvedFields());
CHECK_EQ(num_method_types, dex_cache->NumResolvedMethodTypes());
+ CHECK_EQ(num_call_sites, dex_cache->NumResolvedCallSites());
DexCacheArraysLayout layout(image_pointer_size_, dex_file);
uint8_t* const raw_arrays = oat_dex_file->GetDexCacheArrays();
if (num_strings != 0u) {
@@ -1316,6 +1333,22 @@ bool ClassLinker::UpdateAppImageClassLoadersAndDexCaches(
mirror::MethodTypeDexCachePair::Initialize(method_types);
dex_cache->SetResolvedMethodTypes(method_types);
}
+ if (num_call_sites != 0u) {
+ GcRoot<mirror::CallSite>* const image_resolved_call_sites =
+ dex_cache->GetResolvedCallSites();
+ GcRoot<mirror::CallSite>* const call_sites =
+ reinterpret_cast<GcRoot<mirror::CallSite>*>(raw_arrays + layout.CallSitesOffset());
+ for (size_t j = 0; kIsDebugBuild && j < num_call_sites; ++j) {
+ DCHECK(call_sites[j].IsNull());
+ }
+ CopyNonNull(image_resolved_call_sites,
+ num_call_sites,
+ call_sites,
+ [](const GcRoot<mirror::CallSite>& elem) {
+ return elem.IsNull();
+ });
+ dex_cache->SetResolvedCallSites(call_sites);
+ }
}
{
WriterMutexLock mu2(self, *Locks::dex_lock_);
@@ -1615,7 +1648,7 @@ bool ClassLinker::AddImageSpace(
DCHECK(out_dex_files != nullptr);
DCHECK(error_msg != nullptr);
const uint64_t start_time = NanoTime();
- const bool app_image = class_loader.Get() != nullptr;
+ const bool app_image = class_loader != nullptr;
const ImageHeader& header = space->GetImageHeader();
ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
DCHECK(dex_caches_object != nullptr);
@@ -1645,7 +1678,7 @@ bool ClassLinker::AddImageSpace(
"Class loader should be the last image root.");
MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
app_image ? header.GetImageRoot(ImageHeader::kClassLoader)->AsClassLoader() : nullptr));
- DCHECK(class_roots.Get() != nullptr);
+ DCHECK(class_roots != nullptr);
if (class_roots->GetLength() != static_cast<int32_t>(kClassRootsMax)) {
*error_msg = StringPrintf("Expected %d class roots but got %d",
class_roots->GetLength(),
@@ -2074,7 +2107,7 @@ void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
ObjPtr<mirror::Class> array_of_class = FindArrayClass(self, &class_type);
classes.Assign(
mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
- CHECK(classes.Get() != nullptr); // OOME.
+ CHECK(classes != nullptr); // OOME.
GetClassInToObjectArray accumulator(classes.Get());
VisitClasses(&accumulator);
if (accumulator.Succeeded()) {
@@ -2115,6 +2148,8 @@ ClassLinker::~ClassLinker() {
mirror::ShortArray::ResetArrayClass();
mirror::MethodType::ResetClass();
mirror::MethodHandleImpl::ResetClass();
+ mirror::MethodHandlesLookup::ResetClass();
+ mirror::CallSite::ResetClass();
mirror::EmulatedStackFrame::ResetClass();
Thread* const self = Thread::Current();
for (const ClassLoaderData& data : class_loaders_) {
@@ -2152,7 +2187,7 @@ mirror::DexCache* ClassLinker::AllocDexCache(ObjPtr<mirror::String>* out_locatio
DCHECK(out_location != nullptr);
auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
GetClassRoot(kJavaLangDexCache)->AllocObject(self))));
- if (dex_cache.Get() == nullptr) {
+ if (dex_cache == nullptr) {
self->AssertPendingOOMException();
return nullptr;
}
@@ -2453,7 +2488,7 @@ mirror::Class* ClassLinker::FindClass(Thread* self,
return EnsureResolved(self, descriptor, klass);
}
// Class is not yet loaded.
- if (descriptor[0] != '[' && class_loader.Get() == nullptr) {
+ if (descriptor[0] != '[' && class_loader == nullptr) {
// Non-array class and the boot class loader, search the boot class path.
ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
if (pair.second != nullptr) {
@@ -2616,14 +2651,14 @@ mirror::Class* ClassLinker::DefineClass(Thread* self,
}
}
- if (klass.Get() == nullptr) {
+ if (klass == nullptr) {
// Allocate a class with the status of not ready.
// Interface object should get the right size here. Regular class will
// figure out the right size later and be replaced with one of the right
// size when the class becomes resolved.
klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
}
- if (UNLIKELY(klass.Get() == nullptr)) {
+ if (UNLIKELY(klass == nullptr)) {
self->AssertPendingOOMException();
return nullptr;
}
@@ -2716,7 +2751,7 @@ mirror::Class* ClassLinker::DefineClass(Thread* self,
return nullptr;
}
self->AssertNoPendingException();
- CHECK(h_new_class.Get() != nullptr) << descriptor;
+ CHECK(h_new_class != nullptr) << descriptor;
CHECK(h_new_class->IsResolved() && !h_new_class->IsErroneousResolved()) << descriptor;
// Instrumentation may have updated entrypoints for all methods of all
@@ -2997,7 +3032,7 @@ void ClassLinker::SetupClass(const DexFile& dex_file,
const DexFile::ClassDef& dex_class_def,
Handle<mirror::Class> klass,
ObjPtr<mirror::ClassLoader> class_loader) {
- CHECK(klass.Get() != nullptr);
+ CHECK(klass != nullptr);
CHECK(klass->GetDexCache() != nullptr);
CHECK_EQ(mirror::Class::kStatusNotReady, klass->GetStatus());
const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
@@ -3112,6 +3147,7 @@ void ClassLinker::LoadClassMembers(Thread* self,
last_field_idx = field_idx;
}
}
+
// Load instance fields.
LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
allocator,
@@ -3128,6 +3164,7 @@ void ClassLinker::LoadClassMembers(Thread* self,
last_field_idx = field_idx;
}
}
+
if (UNLIKELY(num_sfields != it.NumStaticFields()) ||
UNLIKELY(num_ifields != it.NumInstanceFields())) {
LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
@@ -3367,7 +3404,7 @@ ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
WriterMutexLock mu(self, *Locks::dex_lock_);
old_data = FindDexCacheDataLocked(dex_file);
old_dex_cache = DecodeDexCache(self, old_data);
- if (old_dex_cache == nullptr && h_dex_cache.Get() != nullptr) {
+ if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
// Do InitializeDexCache while holding dex lock to make sure two threads don't call it at the
// same time with the same dex cache. Since the .bss is shared this can cause failing DCHECK
// that the arrays are null.
@@ -3383,12 +3420,12 @@ ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
if (old_dex_cache != nullptr) {
// Another thread managed to initialize the dex cache faster, so use that DexCache.
// If this thread encountered OOME, ignore it.
- DCHECK_EQ(h_dex_cache.Get() == nullptr, self->IsExceptionPending());
+ DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
self->ClearException();
// We cannot call EnsureSameClassLoader() while holding the dex_lock_.
return EnsureSameClassLoader(self, old_dex_cache, old_data, h_class_loader.Get());
}
- if (h_dex_cache.Get() == nullptr) {
+ if (h_dex_cache == nullptr) {
self->AssertPendingOOMException();
return nullptr;
}
@@ -3517,12 +3554,12 @@ mirror::Class* ClassLinker::CreateArrayClass(Thread* self, const char* descripto
StackHandleScope<2> hs(self);
MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
class_loader)));
- if (component_type.Get() == nullptr) {
+ if (component_type == nullptr) {
DCHECK(self->IsExceptionPending());
// We need to accept erroneous classes as component types.
const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
- if (component_type.Get() == nullptr) {
+ if (component_type == nullptr) {
DCHECK(self->IsExceptionPending());
return nullptr;
} else {
@@ -3583,9 +3620,9 @@ mirror::Class* ClassLinker::CreateArrayClass(Thread* self, const char* descripto
new_class.Assign(GetClassRoot(kLongArrayClass));
}
}
- if (new_class.Get() == nullptr) {
+ if (new_class == nullptr) {
new_class.Assign(AllocClass(self, mirror::Array::ClassSize(image_pointer_size_)));
- if (new_class.Get() == nullptr) {
+ if (new_class == nullptr) {
self->AssertPendingOOMException();
return nullptr;
}
@@ -3818,8 +3855,8 @@ bool ClassLinker::AttemptSupertypeVerification(Thread* self,
Handle<mirror::Class> klass,
Handle<mirror::Class> supertype) {
DCHECK(self != nullptr);
- DCHECK(klass.Get() != nullptr);
- DCHECK(supertype.Get() != nullptr);
+ DCHECK(klass != nullptr);
+ DCHECK(supertype != nullptr);
if (!supertype->IsVerified() && !supertype->IsErroneous()) {
VerifyClass(self, supertype);
@@ -3836,13 +3873,13 @@ bool ClassLinker::AttemptSupertypeVerification(Thread* self,
LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
StackHandleScope<1> hs(self);
Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
- if (cause.Get() != nullptr) {
+ if (cause != nullptr) {
// Set during VerifyClass call (if at all).
self->ClearException();
}
// Change into a verify error.
ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
- if (cause.Get() != nullptr) {
+ if (cause != nullptr) {
self->GetException()->SetCause(cause.Get());
}
ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
@@ -3921,7 +3958,7 @@ verifier::MethodVerifier::FailureKind ClassLinker::VerifyClass(
StackHandleScope<2> hs(self);
MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
// If we have a superclass and we get a hard verification failure we can return immediately.
- if (supertype.Get() != nullptr && !AttemptSupertypeVerification(self, klass, supertype)) {
+ if (supertype != nullptr && !AttemptSupertypeVerification(self, klass, supertype)) {
CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
return verifier::MethodVerifier::kHardFailure;
}
@@ -3936,14 +3973,14 @@ verifier::MethodVerifier::FailureKind ClassLinker::VerifyClass(
// but choose not to for an optimization. If the interfaces is being verified due to a class
// initialization (which would need all the default interfaces to be verified) the class code
// will trigger the recursive verification anyway.
- if ((supertype.Get() == nullptr || supertype->IsVerified()) // See (1)
+ if ((supertype == nullptr || supertype->IsVerified()) // See (1)
&& !klass->IsInterface()) { // See (2)
int32_t iftable_count = klass->GetIfTableCount();
MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
// Loop through all interfaces this class has defined. It doesn't matter the order.
for (int32_t i = 0; i < iftable_count; i++) {
iface.Assign(klass->GetIfTable()->GetInterface(i));
- DCHECK(iface.Get() != nullptr);
+ DCHECK(iface != nullptr);
// We only care if we have default interfaces and can skip if we are already verified...
if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
continue;
@@ -3963,7 +4000,7 @@ verifier::MethodVerifier::FailureKind ClassLinker::VerifyClass(
// At this point if verification failed, then supertype is the "first" supertype that failed
// verification (without a specific order). If verification succeeded, then supertype is either
// null or the original superclass of klass and is verified.
- DCHECK(supertype.Get() == nullptr ||
+ DCHECK(supertype == nullptr ||
supertype.Get() == klass->GetSuperClass() ||
!supertype->IsVerified());
@@ -4004,7 +4041,7 @@ verifier::MethodVerifier::FailureKind ClassLinker::VerifyClass(
if (verifier_failure == verifier::MethodVerifier::kNoFailure) {
// Even though there were no verifier failures we need to respect whether the super-class and
// super-default-interfaces were verified or requiring runtime reverification.
- if (supertype.Get() == nullptr || supertype->IsVerified()) {
+ if (supertype == nullptr || supertype->IsVerified()) {
mirror::Class::SetStatus(klass, mirror::Class::kStatusVerified, self);
} else {
CHECK_EQ(supertype->GetStatus(), mirror::Class::kStatusRetryVerificationAtRuntime);
@@ -4187,7 +4224,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
StackHandleScope<10> hs(self);
MutableHandle<mirror::Class> klass(hs.NewHandle(
AllocClass(self, GetClassRoot(kJavaLangClass), sizeof(mirror::Class))));
- if (klass.Get() == nullptr) {
+ if (klass == nullptr) {
CHECK(self->IsExceptionPending()); // OOME.
return nullptr;
}
@@ -4611,7 +4648,7 @@ bool ClassLinker::InitializeClass(Thread* self, Handle<mirror::Class> klass,
MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
for (size_t i = 0; i < num_direct_interfaces; i++) {
handle_scope_iface.Assign(mirror::Class::GetDirectInterface(self, klass.Get(), i));
- CHECK(handle_scope_iface.Get() != nullptr);
+ CHECK(handle_scope_iface != nullptr);
CHECK(handle_scope_iface->IsInterface());
if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
// We have already done this for this interface. Skip it.
@@ -4890,7 +4927,7 @@ static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
{
StackHandleScope<1> hs(self);
Handle<mirror::Class> return_type(hs.NewHandle(method1->GetReturnType(true /* resolve */)));
- if (UNLIKELY(return_type.Get() == nullptr)) {
+ if (UNLIKELY(return_type == nullptr)) {
ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
return false;
}
@@ -4940,7 +4977,7 @@ static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
Handle<mirror::Class> param_type(hs.NewHandle(
method1->GetClassFromTypeIndex(param_type_idx, true /* resolve */)));
- if (UNLIKELY(param_type.Get() == nullptr)) {
+ if (UNLIKELY(param_type == nullptr)) {
ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
method1, i, param_type_idx);
return false;
@@ -5022,7 +5059,7 @@ bool ClassLinker::EnsureInitialized(Thread* self,
Handle<mirror::Class> c,
bool can_init_fields,
bool can_init_parents) {
- DCHECK(c.Get() != nullptr);
+ DCHECK(c != nullptr);
if (c->IsInitialized()) {
EnsureSkipAccessChecksMethods(c, image_pointer_size_);
self->AssertNoPendingException();
@@ -5202,7 +5239,7 @@ bool ClassLinker::LinkClass(Thread* self,
klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
klass->SetSFieldsPtrUnchecked(nullptr);
klass->SetIFieldsPtrUnchecked(nullptr);
- if (UNLIKELY(h_new_class.Get() == nullptr)) {
+ if (UNLIKELY(h_new_class == nullptr)) {
self->AssertPendingOOMException();
mirror::Class::SetStatus(klass, mirror::Class::kStatusErrorUnresolved, self);
return false;
@@ -5746,7 +5783,7 @@ bool ClassLinker::LinkVirtualMethods(
MutableHandle<mirror::PointerArray> vtable;
if (super_class->ShouldHaveEmbeddedVTable()) {
vtable = hs.NewHandle(AllocPointerArray(self, max_count));
- if (UNLIKELY(vtable.Get() == nullptr)) {
+ if (UNLIKELY(vtable == nullptr)) {
self->AssertPendingOOMException();
return false;
}
@@ -5775,7 +5812,7 @@ bool ClassLinker::LinkVirtualMethods(
}
vtable = hs.NewHandle(down_cast<mirror::PointerArray*>(
super_vtable->CopyOf(self, max_count)));
- if (UNLIKELY(vtable.Get() == nullptr)) {
+ if (UNLIKELY(vtable == nullptr)) {
self->AssertPendingOOMException();
return false;
}
@@ -5911,7 +5948,7 @@ bool ClassLinker::LinkVirtualMethods(
CHECK_LE(actual_count, max_count);
if (actual_count < max_count) {
vtable.Assign(down_cast<mirror::PointerArray*>(vtable->CopyOf(self, actual_count)));
- if (UNLIKELY(vtable.Get() == nullptr)) {
+ if (UNLIKELY(vtable == nullptr)) {
self->AssertPendingOOMException();
return false;
}
@@ -5964,8 +6001,8 @@ static bool ContainsOverridingMethodOf(Thread* self,
PointerSize image_pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK(self != nullptr);
- DCHECK(iface.Get() != nullptr);
- DCHECK(iftable.Get() != nullptr);
+ DCHECK(iface != nullptr);
+ DCHECK(iftable != nullptr);
DCHECK_GE(ifstart, 0u);
DCHECK_LT(ifstart, iftable->Count());
DCHECK_EQ(iface.Get(), iftable->GetInterface(ifstart));
@@ -6050,7 +6087,7 @@ ClassLinker::DefaultMethodSearchResult ClassLinker::FindDefaultMethodImplementat
<< "This will be a fatal error in subsequent versions of android. "
<< "Continuing anyway.";
}
- if (UNLIKELY(chosen_iface.Get() != nullptr)) {
+ if (UNLIKELY(chosen_iface != nullptr)) {
// We have multiple default impls of the same method. This is a potential default conflict.
// We need to check if this possibly conflicting method is either a superclass of the chosen
// default implementation or is overridden by a non-default interface method. In either case
@@ -6505,7 +6542,7 @@ bool ClassLinker::SetupInterfaceLookupTable(Thread* self, Handle<mirror::Class>
StackHandleScope<1> hs(self);
const bool has_superclass = klass->HasSuperClass();
const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
- const bool have_interfaces = interfaces.Get() != nullptr;
+ const bool have_interfaces = interfaces != nullptr;
const size_t num_interfaces =
have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces();
if (num_interfaces == 0) {
@@ -6551,7 +6588,7 @@ bool ClassLinker::SetupInterfaceLookupTable(Thread* self, Handle<mirror::Class>
}
// Create the interface function table.
MutableHandle<mirror::IfTable> iftable(hs.NewHandle(AllocIfTable(self, ifcount)));
- if (UNLIKELY(iftable.Get() == nullptr)) {
+ if (UNLIKELY(iftable == nullptr)) {
self->AssertPendingOOMException();
return false;
}
@@ -6589,7 +6626,7 @@ bool ClassLinker::SetupInterfaceLookupTable(Thread* self, Handle<mirror::Class>
DCHECK_NE(num_interfaces, 0U);
iftable.Assign(down_cast<mirror::IfTable*>(
iftable->CopyOf(self, new_ifcount * mirror::IfTable::kMax)));
- if (UNLIKELY(iftable.Get() == nullptr)) {
+ if (UNLIKELY(iftable == nullptr)) {
self->AssertPendingOOMException();
return false;
}
@@ -6630,7 +6667,7 @@ static void CheckClassOwnsVTableEntries(Thread* self,
Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
- int32_t super_vtable_length = (superclass.Get() != nullptr) ? superclass->GetVTableLength() : 0;
+ int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
CHECK(m != nullptr);
@@ -7289,7 +7326,7 @@ bool ClassLinker::LinkInterfaceMethods(
// For a new interface, however, we need the whole vtable in case a new
// interface method is implemented in the whole superclass.
using_virtuals = false;
- DCHECK(vtable.Get() != nullptr);
+ DCHECK(vtable != nullptr);
input_vtable_array = vtable;
input_array_length = input_vtable_array->GetLength();
}
@@ -7432,7 +7469,7 @@ bool ClassLinker::LinkInterfaceMethods(
if (fill_tables) {
vtable.Assign(helper.UpdateVtable(default_translations, vtable.Get()));
- if (UNLIKELY(vtable.Get() == nullptr)) {
+ if (UNLIKELY(vtable == nullptr)) {
// The helper has already called self->AssertPendingOOMException();
return false;
}
@@ -7452,12 +7489,12 @@ bool ClassLinker::LinkInterfaceMethods(
}
bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
- CHECK(klass.Get() != nullptr);
+ CHECK(klass != nullptr);
return LinkFields(self, klass, false, nullptr);
}
bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
- CHECK(klass.Get() != nullptr);
+ CHECK(klass != nullptr);
return LinkFields(self, klass, true, class_size);
}
@@ -7713,7 +7750,7 @@ void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
mirror::String* ClassLinker::ResolveString(const DexFile& dex_file,
dex::StringIndex string_idx,
Handle<mirror::DexCache> dex_cache) {
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
Thread::PoisonObjectPointersIfDebug();
ObjPtr<mirror::String> resolved = dex_cache->GetResolvedString(string_idx);
if (resolved != nullptr) {
@@ -7729,7 +7766,7 @@ mirror::String* ClassLinker::ResolveString(const DexFile& dex_file,
mirror::String* ClassLinker::LookupString(const DexFile& dex_file,
dex::StringIndex string_idx,
Handle<mirror::DexCache> dex_cache) {
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
ObjPtr<mirror::String> resolved = dex_cache->GetResolvedString(string_idx);
if (resolved != nullptr) {
return resolved.Ptr();
@@ -7783,7 +7820,7 @@ mirror::Class* ClassLinker::ResolveType(const DexFile& dex_file,
dex::TypeIndex type_idx,
Handle<mirror::DexCache> dex_cache,
Handle<mirror::ClassLoader> class_loader) {
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
Thread::PoisonObjectPointersIfDebug();
ObjPtr<mirror::Class> resolved = dex_cache->GetResolvedType(type_idx);
if (resolved == nullptr) {
@@ -7821,7 +7858,7 @@ ArtMethod* ClassLinker::ResolveMethod(const DexFile& dex_file,
Handle<mirror::ClassLoader> class_loader,
ArtMethod* referrer,
InvokeType type) {
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
// Check for hit in the dex cache.
ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, image_pointer_size_);
Thread::PoisonObjectPointersIfDebug();
@@ -8060,7 +8097,7 @@ ArtField* ClassLinker::ResolveField(const DexFile& dex_file,
Handle<mirror::DexCache> dex_cache,
Handle<mirror::ClassLoader> class_loader,
bool is_static) {
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
ArtField* resolved = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
Thread::PoisonObjectPointersIfDebug();
if (resolved != nullptr) {
@@ -8101,7 +8138,7 @@ ArtField* ClassLinker::ResolveFieldJLS(const DexFile& dex_file,
uint32_t field_idx,
Handle<mirror::DexCache> dex_cache,
Handle<mirror::ClassLoader> class_loader) {
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
ArtField* resolved = dex_cache->GetResolvedField(field_idx, image_pointer_size_);
Thread::PoisonObjectPointersIfDebug();
if (resolved != nullptr) {
@@ -8132,7 +8169,7 @@ mirror::MethodType* ClassLinker::ResolveMethodType(const DexFile& dex_file,
Handle<mirror::DexCache> dex_cache,
Handle<mirror::ClassLoader> class_loader) {
DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
- DCHECK(dex_cache.Get() != nullptr);
+ DCHECK(dex_cache != nullptr);
ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
if (resolved != nullptr) {
@@ -8146,7 +8183,7 @@ mirror::MethodType* ClassLinker::ResolveMethodType(const DexFile& dex_file,
const DexFile::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
Handle<mirror::Class> return_type(hs.NewHandle(
ResolveType(dex_file, proto_id.return_type_idx_, dex_cache, class_loader)));
- if (return_type.Get() == nullptr) {
+ if (return_type == nullptr) {
DCHECK(self->IsExceptionPending());
return nullptr;
}
@@ -8161,7 +8198,7 @@ mirror::MethodType* ClassLinker::ResolveMethodType(const DexFile& dex_file,
ObjPtr<mirror::Class> array_of_class = FindArrayClass(self, &class_type);
Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
- if (method_params.Get() == nullptr) {
+ if (method_params == nullptr) {
DCHECK(self->IsExceptionPending());
return nullptr;
}
@@ -8172,7 +8209,7 @@ mirror::MethodType* ClassLinker::ResolveMethodType(const DexFile& dex_file,
for (; it.HasNext(); it.Next()) {
const dex::TypeIndex type_idx = it.GetTypeIdx();
param_class.Assign(ResolveType(dex_file, type_idx, dex_cache, class_loader));
- if (param_class.Get() == nullptr) {
+ if (param_class == nullptr) {
DCHECK(self->IsExceptionPending());
return nullptr;
}
@@ -8189,6 +8226,148 @@ mirror::MethodType* ClassLinker::ResolveMethodType(const DexFile& dex_file,
return type.Get();
}
+mirror::MethodHandle* ClassLinker::ResolveMethodHandle(uint32_t method_handle_idx,
+ ArtMethod* referrer)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ Thread* const self = Thread::Current();
+ const DexFile* const dex_file = referrer->GetDexFile();
+ const DexFile::MethodHandleItem& mh = dex_file->GetMethodHandle(method_handle_idx);
+
+ union {
+ ArtField* field;
+ ArtMethod* method;
+ uintptr_t field_or_method;
+ } target;
+ uint32_t num_params;
+ mirror::MethodHandle::Kind kind;
+ DexFile::MethodHandleType handle_type =
+ static_cast<DexFile::MethodHandleType>(mh.method_handle_type_);
+ switch (handle_type) {
+ case DexFile::MethodHandleType::kStaticPut: {
+ kind = mirror::MethodHandle::Kind::kStaticPut;
+ target.field = ResolveField(mh.field_or_method_idx_, referrer, true /* is_static */);
+ num_params = 1;
+ break;
+ }
+ case DexFile::MethodHandleType::kStaticGet: {
+ kind = mirror::MethodHandle::Kind::kStaticGet;
+ target.field = ResolveField(mh.field_or_method_idx_, referrer, true /* is_static */);
+ num_params = 0;
+ break;
+ }
+ case DexFile::MethodHandleType::kInstancePut: {
+ kind = mirror::MethodHandle::Kind::kInstancePut;
+ target.field = ResolveField(mh.field_or_method_idx_, referrer, false /* is_static */);
+ num_params = 2;
+ break;
+ }
+ case DexFile::MethodHandleType::kInstanceGet: {
+ kind = mirror::MethodHandle::Kind::kInstanceGet;
+ target.field = ResolveField(mh.field_or_method_idx_, referrer, false /* is_static */);
+ num_params = 1;
+ break;
+ }
+ case DexFile::MethodHandleType::kInvokeStatic: {
+ kind = mirror::MethodHandle::Kind::kInvokeStatic;
+ target.method = ResolveMethod<kNoICCECheckForCache>(self,
+ mh.field_or_method_idx_,
+ referrer,
+ InvokeType::kStatic);
+ uint32_t shorty_length;
+ target.method->GetShorty(&shorty_length);
+ num_params = shorty_length - 1; // Remove 1 for return value.
+ break;
+ }
+ case DexFile::MethodHandleType::kInvokeInstance: {
+ kind = mirror::MethodHandle::Kind::kInvokeVirtual;
+ target.method = ResolveMethod<kNoICCECheckForCache>(self,
+ mh.field_or_method_idx_,
+ referrer,
+ InvokeType::kVirtual);
+ uint32_t shorty_length;
+ target.method->GetShorty(&shorty_length);
+ num_params = shorty_length - 1; // Remove 1 for return value.
+ break;
+ }
+ case DexFile::MethodHandleType::kInvokeConstructor: {
+ UNIMPLEMENTED(FATAL) << "Invoke constructor is implemented as a transform.";
+ num_params = 0;
+ }
+ }
+
+ StackHandleScope<5> hs(self);
+ ObjPtr<mirror::Class> class_type = mirror::Class::GetJavaLangClass();
+ ObjPtr<mirror::Class> array_of_class = FindArrayClass(self, &class_type);
+ Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
+ mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
+ if (method_params.Get() == nullptr) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+
+ Handle<mirror::Class> return_type;
+ switch (handle_type) {
+ case DexFile::MethodHandleType::kStaticPut: {
+ method_params->Set(0, target.field->GetType<true>());
+ return_type = hs.NewHandle(FindPrimitiveClass('V'));
+ break;
+ }
+ case DexFile::MethodHandleType::kStaticGet: {
+ return_type = hs.NewHandle(target.field->GetType<true>());
+ break;
+ }
+ case DexFile::MethodHandleType::kInstancePut: {
+ method_params->Set(0, target.field->GetDeclaringClass());
+ method_params->Set(1, target.field->GetType<true>());
+ return_type = hs.NewHandle(FindPrimitiveClass('V'));
+ break;
+ }
+ case DexFile::MethodHandleType::kInstanceGet: {
+ method_params->Set(0, target.field->GetDeclaringClass());
+ return_type = hs.NewHandle(target.field->GetType<true>());
+ break;
+ }
+ case DexFile::MethodHandleType::kInvokeStatic:
+ case DexFile::MethodHandleType::kInvokeInstance: {
+ // TODO(oth): This will not work for varargs methods as this
+ // requires instantiating a Transformer. This resolution step
+ // would be best done in managed code rather than in the run
+ // time (b/35235705)
+ Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
+ Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
+ DexFileParameterIterator it(*dex_file, target.method->GetPrototype());
+ for (int32_t i = 0; it.HasNext(); i++, it.Next()) {
+ const dex::TypeIndex type_idx = it.GetTypeIdx();
+ mirror::Class* klass = ResolveType(*dex_file, type_idx, dex_cache, class_loader);
+ if (nullptr == klass) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ method_params->Set(i, klass);
+ }
+ return_type = hs.NewHandle(target.method->GetReturnType(true));
+ break;
+ }
+ case DexFile::MethodHandleType::kInvokeConstructor: {
+ // TODO(oth): b/35235705
+ UNIMPLEMENTED(FATAL) << "Invoke constructor is implemented as a transform.";
+ }
+ }
+
+ if (return_type.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+
+ Handle<mirror::MethodType>
+ mt(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
+ if (mt.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ return mirror::MethodHandleImpl::Create(self, target.field_or_method, kind, mt);
+}
+
bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
return (entry_point == GetQuickResolutionStub()) ||
(quick_resolution_trampoline_ == entry_point);
@@ -8304,7 +8483,9 @@ const char* ClassLinker::GetClassRootDescriptor(ClassRoot class_root) {
"[Ljava/lang/reflect/Constructor;",
"[Ljava/lang/reflect/Field;",
"[Ljava/lang/reflect/Method;",
+ "Ljava/lang/invoke/CallSite;",
"Ljava/lang/invoke/MethodHandleImpl;",
+ "Ljava/lang/invoke/MethodHandles$Lookup;",
"Ljava/lang/invoke/MethodType;",
"Ljava/lang/ClassLoader;",
"Ljava/lang/Throwable;",
@@ -8352,7 +8533,7 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self,
jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->GetType<true>()));
- DCHECK(dex_elements_class.Get() != nullptr);
+ DCHECK(dex_elements_class != nullptr);
DCHECK(dex_elements_class->IsArrayClass());
Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
mirror::ObjectArray<mirror::Object>::Alloc(self,
@@ -8381,21 +8562,21 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self,
Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
self,
kDexFileIndexStart + 1));
- DCHECK(h_long_array.Get() != nullptr);
+ DCHECK(h_long_array != nullptr);
h_long_array->Set(kDexFileIndexStart, reinterpret_cast<intptr_t>(dex_file));
Handle<mirror::Object> h_dex_file = hs2.NewHandle(
cookie_field->GetDeclaringClass()->AllocObject(self));
- DCHECK(h_dex_file.Get() != nullptr);
+ DCHECK(h_dex_file != nullptr);
cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
Handle<mirror::String> h_file_name = hs2.NewHandle(
mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
- DCHECK(h_file_name.Get() != nullptr);
+ DCHECK(h_file_name != nullptr);
file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
- DCHECK(h_element.Get() != nullptr);
+ DCHECK(h_element != nullptr);
element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
h_dex_elements->Set(index, h_element.Get());
@@ -8406,7 +8587,7 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self,
// Create DexPathList.
Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
dex_elements_field->GetDeclaringClass()->AllocObject(self));
- DCHECK(h_dex_path_list.Get() != nullptr);
+ DCHECK(h_dex_path_list != nullptr);
// Set elements.
dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
@@ -8415,7 +8596,7 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self,
soa.Decode<mirror::Class>(WellKnownClasses::dalvik_system_PathClassLoader));
Handle<mirror::Object> h_path_class_loader = hs.NewHandle(
h_path_class_class->AllocObject(self));
- DCHECK(h_path_class_loader.Get() != nullptr);
+ DCHECK(h_path_class_loader != nullptr);
// Set DexPathList.
ArtField* path_list_field =
jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 62d3c29a19..e27a53d15c 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -55,6 +55,8 @@ namespace mirror {
class DexCacheMethodHandlesTest_Open_Test;
class DexCacheTest_Open_Test;
class IfTable;
+ class MethodHandle;
+ class MethodHandlesLookup;
class MethodType;
template<class T> class ObjectArray;
class StackTraceElement;
@@ -106,7 +108,9 @@ class ClassLinker {
kJavaLangReflectConstructorArrayClass,
kJavaLangReflectFieldArrayClass,
kJavaLangReflectMethodArrayClass,
+ kJavaLangInvokeCallSite,
kJavaLangInvokeMethodHandleImpl,
+ kJavaLangInvokeMethodHandlesLookup,
kJavaLangInvokeMethodType,
kJavaLangClassLoader,
kJavaLangThrowable,
@@ -366,6 +370,12 @@ class ClassLinker {
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Locks::dex_lock_, !Roles::uninterruptible_);
+ // Resolve a method handle with a given ID from the DexFile. The
+ // result is not cached in the DexCache as the instance will only be
+ // used once in most circumstances.
+ mirror::MethodHandle* ResolveMethodHandle(uint32_t method_handle_idx, ArtMethod* referrer)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
// Returns true on success, false if there's an exception pending.
// can_run_clinit=false allows the compiler to attempt to init a class,
// given the restriction that no <clinit> execution is possible.
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 03105cb6fb..07f3744b69 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -32,6 +32,7 @@
#include "entrypoints/entrypoint_utils-inl.h"
#include "gc/heap.h"
#include "mirror/accessible_object.h"
+#include "mirror/call_site.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
#include "mirror/dex_cache.h"
@@ -40,6 +41,7 @@
#include "mirror/field.h"
#include "mirror/method_type.h"
#include "mirror/method_handle_impl.h"
+#include "mirror/method_handles_lookup.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/proxy.h"
@@ -185,7 +187,7 @@ class ClassLinkerTest : public CommonRuntimeTest {
void AssertArrayClass(const std::string& array_descriptor, Handle<mirror::Class> array)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ASSERT_TRUE(array.Get() != nullptr);
+ ASSERT_TRUE(array != nullptr);
ASSERT_TRUE(array->GetClass() != nullptr);
ASSERT_EQ(array->GetClass(), array->GetClass()->GetClass());
EXPECT_TRUE(array->GetClass()->GetSuperClass() != nullptr);
@@ -409,7 +411,7 @@ class ClassLinkerTest : public CommonRuntimeTest {
StackHandleScope<1> hs(self);
Handle<mirror::Class> klass(
hs.NewHandle(class_linker_->FindSystemClass(self, descriptor.c_str())));
- ASSERT_TRUE(klass.Get() != nullptr);
+ ASSERT_TRUE(klass != nullptr);
std::string temp;
EXPECT_STREQ(descriptor.c_str(), klass.Get()->GetDescriptor(&temp));
EXPECT_EQ(class_loader, klass->GetClassLoader());
@@ -669,11 +671,13 @@ struct DexCacheOffsets : public CheckOffsets<mirror::DexCache> {
addOffset(OFFSETOF_MEMBER(mirror::DexCache, dex_), "dex");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, dex_file_), "dexFile");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, location_), "location");
+ addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_call_sites_), "numResolvedCallSites");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_fields_), "numResolvedFields");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_method_types_), "numResolvedMethodTypes");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_methods_), "numResolvedMethods");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_types_), "numResolvedTypes");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_strings_), "numStrings");
+ addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_call_sites_), "resolvedCallSites");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_fields_), "resolvedFields");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_method_types_), "resolvedMethodTypes");
addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_methods_), "resolvedMethods");
@@ -762,6 +766,14 @@ struct MethodHandleImplOffsets : public CheckOffsets<mirror::MethodHandleImpl> {
}
};
+struct MethodHandlesLookupOffsets : public CheckOffsets<mirror::MethodHandlesLookup> {
+ MethodHandlesLookupOffsets() : CheckOffsets<mirror::MethodHandlesLookup>(
+ false, "Ljava/lang/invoke/MethodHandles$Lookup;") {
+ addOffset(OFFSETOF_MEMBER(mirror::MethodHandlesLookup, allowed_modes_), "allowedModes");
+ addOffset(OFFSETOF_MEMBER(mirror::MethodHandlesLookup, lookup_class_), "lookupClass");
+ }
+};
+
struct EmulatedStackFrameOffsets : public CheckOffsets<mirror::EmulatedStackFrame> {
EmulatedStackFrameOffsets() : CheckOffsets<mirror::EmulatedStackFrame>(
false, "Ldalvik/system/EmulatedStackFrame;") {
@@ -772,6 +784,13 @@ struct EmulatedStackFrameOffsets : public CheckOffsets<mirror::EmulatedStackFram
}
};
+struct CallSiteOffsets : public CheckOffsets<mirror::CallSite> {
+ CallSiteOffsets() : CheckOffsets<mirror::CallSite>(
+ false, "Ljava/lang/invoke/CallSite;") {
+ addOffset(OFFSETOF_MEMBER(mirror::CallSite, target_), "target");
+ }
+};
+
// C++ fields must exactly match the fields in the Java classes. If this fails,
// reorder the fields in the C++ class. Managed class fields are ordered by
// ClassLinker::LinkFields.
@@ -794,7 +813,9 @@ TEST_F(ClassLinkerTest, ValidateFieldOrderOfJavaCppUnionClasses) {
EXPECT_TRUE(MethodTypeOffsets().Check());
EXPECT_TRUE(MethodHandleOffsets().Check());
EXPECT_TRUE(MethodHandleImplOffsets().Check());
+ EXPECT_TRUE(MethodHandlesLookupOffsets().Check());
EXPECT_TRUE(EmulatedStackFrameOffsets().Check());
+ EXPECT_TRUE(CallSiteOffsets().Check());
}
TEST_F(ClassLinkerTest, FindClassNonexistent) {
@@ -1411,13 +1432,13 @@ TEST_F(ClassLinkerTest, IsBootStrapClassLoaded) {
// java.lang.Object is a bootstrap class.
Handle<mirror::Class> jlo_class(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
- ASSERT_TRUE(jlo_class.Get() != nullptr);
+ ASSERT_TRUE(jlo_class != nullptr);
EXPECT_TRUE(jlo_class.Get()->IsBootStrapClassLoaded());
// Statics is not a bootstrap class.
Handle<mirror::Class> statics(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LStatics;", class_loader)));
- ASSERT_TRUE(statics.Get() != nullptr);
+ ASSERT_TRUE(statics != nullptr);
EXPECT_FALSE(statics.Get()->IsBootStrapClassLoaded());
}
@@ -1431,11 +1452,11 @@ TEST_F(ClassLinkerTest, RegisterDexFileName) {
ReaderMutexLock mu(soa.Self(), *Locks::dex_lock_);
for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
dex_cache.Assign(soa.Self()->DecodeJObject(data.weak_root)->AsDexCache());
- if (dex_cache.Get() != nullptr) {
+ if (dex_cache != nullptr) {
break;
}
}
- ASSERT_TRUE(dex_cache.Get() != nullptr);
+ ASSERT_TRUE(dex_cache != nullptr);
}
// Make a copy of the dex cache and change the name.
dex_cache.Assign(dex_cache->Clone(soa.Self())->AsDexCache());
@@ -1487,7 +1508,7 @@ TEST_F(ClassLinkerMethodHandlesTest, TestResolveMethodTypes) {
class_linker_->ResolveMethodType(dex_file, method1_id.proto_idx_, dex_cache, class_loader));
// Assert that the method type was resolved successfully.
- ASSERT_TRUE(method1_type.Get() != nullptr);
+ ASSERT_TRUE(method1_type != nullptr);
// Assert that the return type and the method arguments are as we expect.
Handle<mirror::Class> string_class(
diff --git a/runtime/class_table_test.cc b/runtime/class_table_test.cc
index f1248eb00c..18c2b827fe 100644
--- a/runtime/class_table_test.cc
+++ b/runtime/class_table_test.cc
@@ -80,7 +80,7 @@ TEST_F(ClassTableTest, ClassTable) {
Handle<mirror::Class> h_Y(
hs.NewHandle(class_linker_->FindClass(soa.Self(), descriptor_y, class_loader)));
Handle<mirror::Object> obj_X = hs.NewHandle(h_X->AllocObject(soa.Self()));
- ASSERT_TRUE(obj_X.Get() != nullptr);
+ ASSERT_TRUE(obj_X != nullptr);
ClassTable table;
EXPECT_EQ(table.NumZygoteClasses(class_loader.Get()), 0u);
EXPECT_EQ(table.NumNonZygoteClasses(class_loader.Get()), 0u);
diff --git a/runtime/common_throws.cc b/runtime/common_throws.cc
index a44f79e193..4f4bed0169 100644
--- a/runtime/common_throws.cc
+++ b/runtime/common_throws.cc
@@ -126,6 +126,22 @@ void ThrowArrayStoreException(ObjPtr<mirror::Class> element_class,
mirror::Class::PrettyDescriptor(array_class).c_str()).c_str());
}
+// BootstrapMethodError
+
+void ThrowBootstrapMethodError(const char* fmt, ...) {
+ va_list args;
+ va_start(args, fmt);
+ ThrowException("Ljava/lang/BootstrapMethodError;", nullptr, fmt, &args);
+ va_end(args);
+}
+
+void ThrowWrappedBootstrapMethodError(const char* fmt, ...) {
+ va_list args;
+ va_start(args, fmt);
+ ThrowWrappedException("Ljava/lang/BootstrapMethodError;", nullptr, fmt, &args);
+ va_end(args);
+}
+
// ClassCastException
void ThrowClassCastException(ObjPtr<mirror::Class> dest_type, ObjPtr<mirror::Class> src_type) {
diff --git a/runtime/common_throws.h b/runtime/common_throws.h
index 76ea2ae6c8..55a89388ea 100644
--- a/runtime/common_throws.h
+++ b/runtime/common_throws.h
@@ -56,6 +56,14 @@ void ThrowArrayStoreException(ObjPtr<mirror::Class> element_class,
ObjPtr<mirror::Class> array_class)
REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
+// BootstrapMethodError
+
+void ThrowBootstrapMethodError(const char* fmt, ...)
+ REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
+
+void ThrowWrappedBootstrapMethodError(const char* fmt, ...)
+ REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
+
// ClassCircularityError
void ThrowClassCircularityError(ObjPtr<mirror::Class> c)
@@ -236,7 +244,7 @@ void ThrowVerifyError(ObjPtr<mirror::Class> referrer, const char* fmt, ...)
__attribute__((__format__(__printf__, 2, 3)))
REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
-// WrontMethodTypeException
+// WrongMethodTypeException
void ThrowWrongMethodTypeException(mirror::MethodType* callee_type,
mirror::MethodType* callsite_type)
REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 1a0cec075c..cfdc6e1afb 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -1765,13 +1765,13 @@ static JDWP::JdwpError GetFieldValueImpl(JDWP::RefTypeId ref_type_id, JDWP::Obje
StackHandleScope<2> hs(self);
MutableHandle<mirror::Object>
o(hs.NewHandle(Dbg::GetObjectRegistry()->Get<mirror::Object*>(object_id, &error)));
- if ((!is_static && o.Get() == nullptr) || error != JDWP::ERR_NONE) {
+ if ((!is_static && o == nullptr) || error != JDWP::ERR_NONE) {
return JDWP::ERR_INVALID_OBJECT;
}
ArtField* f = FromFieldId(field_id);
mirror::Class* receiver_class = c;
- if (receiver_class == nullptr && o.Get() != nullptr) {
+ if (receiver_class == nullptr && o != nullptr) {
receiver_class = o->GetClass();
}
@@ -1899,7 +1899,7 @@ static JDWP::JdwpError SetFieldValueImpl(JDWP::ObjectId object_id, JDWP::FieldId
StackHandleScope<2> hs(self);
MutableHandle<mirror::Object>
o(hs.NewHandle(Dbg::GetObjectRegistry()->Get<mirror::Object*>(object_id, &error)));
- if ((!is_static && o.Get() == nullptr) || error != JDWP::ERR_NONE) {
+ if ((!is_static && o == nullptr) || error != JDWP::ERR_NONE) {
return JDWP::ERR_INVALID_OBJECT;
}
ArtField* f = FromFieldId(field_id);
@@ -2867,7 +2867,7 @@ void Dbg::PostLocationEvent(ArtMethod* m, int dex_pc, mirror::Object* this_objec
StackHandleScope<1> hs(self);
Handle<mirror::Throwable> pending_exception(hs.NewHandle(self->GetException()));
self->ClearException();
- if (kIsDebugBuild && pending_exception.Get() != nullptr) {
+ if (kIsDebugBuild && pending_exception != nullptr) {
const DexFile::CodeItem* code_item = location.method->GetCodeItem();
const Instruction* instr = Instruction::At(&code_item->insns_[location.dex_pc]);
CHECK_EQ(Instruction::MOVE_EXCEPTION, instr->Opcode());
@@ -2875,7 +2875,7 @@ void Dbg::PostLocationEvent(ArtMethod* m, int dex_pc, mirror::Object* this_objec
gJdwpState->PostLocationEvent(&location, this_object, event_flags, return_value);
- if (pending_exception.Get() != nullptr) {
+ if (pending_exception != nullptr) {
self->SetException(pending_exception.Get());
}
}
@@ -4027,7 +4027,7 @@ void Dbg::ExecuteMethod(DebugInvokeReq* pReq) {
ExecuteMethodWithoutPendingException(soa, pReq);
// If an exception was pending before the invoke, restore it now.
- if (old_exception.Get() != nullptr) {
+ if (old_exception != nullptr) {
soa.Self()->SetException(old_exception.Get());
}
}
@@ -4356,9 +4356,9 @@ void Dbg::DdmSendThreadNotification(Thread* t, uint32_t type) {
ScopedObjectAccessUnchecked soa(Thread::Current());
StackHandleScope<1> hs(soa.Self());
Handle<mirror::String> name(hs.NewHandle(t->GetThreadName()));
- size_t char_count = (name.Get() != nullptr) ? name->GetLength() : 0;
- const jchar* chars = (name.Get() != nullptr) ? name->GetValue() : nullptr;
- bool is_compressed = (name.Get() != nullptr) ? name->IsCompressed() : false;
+ size_t char_count = (name != nullptr) ? name->GetLength() : 0;
+ const jchar* chars = (name != nullptr) ? name->GetValue() : nullptr;
+ bool is_compressed = (name != nullptr) ? name->IsCompressed() : false;
std::vector<uint8_t> bytes;
JDWP::Append4BE(bytes, t->GetThreadId());
diff --git a/runtime/dex2oat_environment_test.h b/runtime/dex2oat_environment_test.h
index 8b0c51c998..e58c6f541e 100644
--- a/runtime/dex2oat_environment_test.h
+++ b/runtime/dex2oat_environment_test.h
@@ -53,7 +53,7 @@ class Dex2oatEnvironmentTest : public CommonRuntimeTest {
ASSERT_EQ(0, mkdir(odex_dir_.c_str(), 0700));
// Verify the environment is as we expect
- uint32_t checksum;
+ std::vector<uint32_t> checksums;
std::string error_msg;
ASSERT_TRUE(OS::FileExists(GetSystemImageFile().c_str()))
<< "Expected pre-compiled boot image to be at: " << GetSystemImageFile();
@@ -61,7 +61,7 @@ class Dex2oatEnvironmentTest : public CommonRuntimeTest {
<< "Expected dex file to be at: " << GetDexSrc1();
ASSERT_TRUE(OS::FileExists(GetStrippedDexSrc1().c_str()))
<< "Expected stripped dex file to be at: " << GetStrippedDexSrc1();
- ASSERT_FALSE(DexFile::GetChecksum(GetStrippedDexSrc1().c_str(), &checksum, &error_msg))
+ ASSERT_FALSE(DexFile::GetMultiDexChecksums(GetStrippedDexSrc1().c_str(), &checksums, &error_msg))
<< "Expected stripped dex file to be stripped: " << GetStrippedDexSrc1();
ASSERT_TRUE(OS::FileExists(GetDexSrc2().c_str()))
<< "Expected dex file to be at: " << GetDexSrc2();
diff --git a/runtime/dex_file.cc b/runtime/dex_file.cc
index f59420d332..b6a2e09719 100644
--- a/runtime/dex_file.cc
+++ b/runtime/dex_file.cc
@@ -72,23 +72,13 @@ struct DexFile::AnnotationValue {
uint8_t type_;
};
-bool DexFile::GetChecksum(const char* filename, uint32_t* checksum, std::string* error_msg) {
- CHECK(checksum != nullptr);
+bool DexFile::GetMultiDexChecksums(const char* filename,
+ std::vector<uint32_t>* checksums,
+ std::string* error_msg) {
+ CHECK(checksums != nullptr);
uint32_t magic;
- // Strip ":...", which is the location
- const char* zip_entry_name = kClassesDex;
- const char* file_part = filename;
- std::string file_part_storage;
-
- if (DexFile::IsMultiDexLocation(filename)) {
- file_part_storage = GetBaseLocation(filename);
- file_part = file_part_storage.c_str();
- zip_entry_name = filename + file_part_storage.size() + 1;
- DCHECK_EQ(zip_entry_name[-1], kMultiDexSeparator);
- }
-
- File fd = OpenAndReadMagic(file_part, &magic, error_msg);
+ File fd = OpenAndReadMagic(filename, &magic, error_msg);
if (fd.Fd() == -1) {
DCHECK(!error_msg->empty());
return false;
@@ -97,17 +87,25 @@ bool DexFile::GetChecksum(const char* filename, uint32_t* checksum, std::string*
std::unique_ptr<ZipArchive> zip_archive(
ZipArchive::OpenFromFd(fd.Release(), filename, error_msg));
if (zip_archive.get() == nullptr) {
- *error_msg = StringPrintf("Failed to open zip archive '%s' (error msg: %s)", file_part,
+ *error_msg = StringPrintf("Failed to open zip archive '%s' (error msg: %s)", filename,
error_msg->c_str());
return false;
}
- std::unique_ptr<ZipEntry> zip_entry(zip_archive->Find(zip_entry_name, error_msg));
+
+ uint32_t i = 0;
+ std::string zip_entry_name = GetMultiDexClassesDexName(i++);
+ std::unique_ptr<ZipEntry> zip_entry(zip_archive->Find(zip_entry_name.c_str(), error_msg));
if (zip_entry.get() == nullptr) {
- *error_msg = StringPrintf("Zip archive '%s' doesn't contain %s (error msg: %s)", file_part,
- zip_entry_name, error_msg->c_str());
+ *error_msg = StringPrintf("Zip archive '%s' doesn't contain %s (error msg: %s)", filename,
+ zip_entry_name.c_str(), error_msg->c_str());
return false;
}
- *checksum = zip_entry->GetCrc32();
+
+ do {
+ checksums->push_back(zip_entry->GetCrc32());
+ zip_entry_name = DexFile::GetMultiDexClassesDexName(i++);
+ zip_entry.reset(zip_archive->Find(zip_entry_name.c_str(), error_msg));
+ } while (zip_entry.get() != nullptr);
return true;
}
if (IsDexMagic(magic)) {
@@ -116,7 +114,7 @@ bool DexFile::GetChecksum(const char* filename, uint32_t* checksum, std::string*
if (dex_file.get() == nullptr) {
return false;
}
- *checksum = dex_file->GetHeader().checksum_;
+ checksums->push_back(dex_file->GetHeader().checksum_);
return true;
}
*error_msg = StringPrintf("Expected valid zip or dex file: '%s'", filename);
@@ -333,7 +331,32 @@ std::unique_ptr<const DexFile> DexFile::OpenOneDexFileFromZip(const ZipArchive&
*error_code = ZipOpenErrorCode::kDexFileError;
return nullptr;
}
- std::unique_ptr<MemMap> map(zip_entry->ExtractToMemMap(location.c_str(), entry_name, error_msg));
+
+ std::unique_ptr<MemMap> map;
+ if (zip_entry->IsUncompressed()) {
+ if (!zip_entry->IsAlignedTo(alignof(Header))) {
+ // Do not mmap unaligned ZIP entries because
+ // doing so would fail dex verification which requires 4 byte alignment.
+ LOG(WARNING) << "Can't mmap dex file " << location << "!" << entry_name << " directly; "
+ << "please zipalign to " << alignof(Header) << " bytes. "
+ << "Falling back to extracting file.";
+ } else {
+ // Map uncompressed files within zip as file-backed to avoid a dirty copy.
+ map.reset(zip_entry->MapDirectlyFromFile(location.c_str(), /*out*/error_msg));
+ if (map == nullptr) {
+ LOG(WARNING) << "Can't mmap dex file " << location << "!" << entry_name << " directly; "
+ << "is your ZIP file corrupted? Falling back to extraction.";
+ // Try again with Extraction which still has a chance of recovery.
+ }
+ }
+ }
+
+ if (map == nullptr) {
+ // Default path for compressed ZIP entries,
+ // and fallback for stored ZIP entries.
+ map.reset(zip_entry->ExtractToMemMap(location.c_str(), entry_name, error_msg));
+ }
+
if (map == nullptr) {
*error_msg = StringPrintf("Failed to extract '%s' from '%s': %s", entry_name, location.c_str(),
error_msg->c_str());
@@ -415,7 +438,7 @@ bool DexFile::OpenAllDexFilesFromZip(const ZipArchive& zip_archive,
&error_code));
if (next_dex_file.get() == nullptr) {
if (error_code != ZipOpenErrorCode::kEntryNotFound) {
- LOG(WARNING) << error_msg;
+ LOG(WARNING) << "Zip open failed: " << *error_msg;
}
break;
} else {
@@ -497,9 +520,19 @@ DexFile::DexFile(const uint8_t* base,
method_ids_(reinterpret_cast<const MethodId*>(base + header_->method_ids_off_)),
proto_ids_(reinterpret_cast<const ProtoId*>(base + header_->proto_ids_off_)),
class_defs_(reinterpret_cast<const ClassDef*>(base + header_->class_defs_off_)),
+ method_handles_(nullptr),
+ num_method_handles_(0),
+ call_site_ids_(nullptr),
+ num_call_site_ids_(0),
oat_dex_file_(oat_dex_file) {
CHECK(begin_ != nullptr) << GetLocation();
CHECK_GT(size_, 0U) << GetLocation();
+ // Check base (=header) alignment.
+ // Must be 4-byte aligned to avoid undefined behavior when accessing
+ // any of the sections via a pointer.
+ CHECK_ALIGNED(begin_, alignof(Header));
+
+ InitializeSectionsFromMapList();
}
DexFile::~DexFile() {
@@ -540,6 +573,29 @@ bool DexFile::CheckMagicAndVersion(std::string* error_msg) const {
return true;
}
+void DexFile::InitializeSectionsFromMapList() {
+ const MapList* map_list = reinterpret_cast<const MapList*>(begin_ + header_->map_off_);
+ const size_t count = map_list->size_;
+
+ size_t map_limit = header_->map_off_ + count * sizeof(MapItem);
+ if (header_->map_off_ >= map_limit || map_limit > size_) {
+ // Overflow or out of bounds. The dex file verifier runs after
+ // this method and will reject the file as it is malformed.
+ return;
+ }
+
+ for (size_t i = 0; i < count; ++i) {
+ const MapItem& map_item = map_list->list_[i];
+ if (map_item.type_ == kDexTypeMethodHandleItem) {
+ method_handles_ = reinterpret_cast<const MethodHandleItem*>(begin_ + map_item.offset_);
+ num_method_handles_ = map_item.size_;
+ } else if (map_item.type_ == kDexTypeCallSiteIdItem) {
+ call_site_ids_ = reinterpret_cast<const CallSiteIdItem*>(begin_ + map_item.offset_);
+ num_call_site_ids_ = map_item.size_;
+ }
+ }
+}
+
bool DexFile::IsMagicValid(const uint8_t* magic) {
return (memcmp(magic, kDexMagic, sizeof(kDexMagic)) == 0);
}
@@ -1339,24 +1395,20 @@ void ClassDataItemIterator::ReadClassDataMethod() {
}
}
-EncodedStaticFieldValueIterator::EncodedStaticFieldValueIterator(const DexFile& dex_file,
- const DexFile::ClassDef& class_def)
+EncodedArrayValueIterator::EncodedArrayValueIterator(const DexFile& dex_file,
+ const uint8_t* array_data)
: dex_file_(dex_file),
array_size_(),
pos_(-1),
+ ptr_(array_data),
type_(kByte) {
- ptr_ = dex_file_.GetEncodedStaticFieldValuesArray(class_def);
- if (ptr_ == nullptr) {
- array_size_ = 0;
- } else {
- array_size_ = DecodeUnsignedLeb128(&ptr_);
- }
+ array_size_ = (ptr_ != nullptr) ? DecodeUnsignedLeb128(&ptr_) : 0;
if (array_size_ > 0) {
Next();
}
}
-void EncodedStaticFieldValueIterator::Next() {
+void EncodedArrayValueIterator::Next() {
pos_++;
if (pos_ >= array_size_) {
return;
@@ -1396,6 +1448,8 @@ void EncodedStaticFieldValueIterator::Next() {
break;
case kString:
case kType:
+ case kMethodType:
+ case kMethodHandle:
jval_.i = DexFile::ReadUnsignedInt(ptr_, value_arg, false);
break;
case kField:
diff --git a/runtime/dex_file.h b/runtime/dex_file.h
index cb7f174787..58b8e792ee 100644
--- a/runtime/dex_file.h
+++ b/runtime/dex_file.h
@@ -103,7 +103,7 @@ class DexFile {
};
// Map item type codes.
- enum {
+ enum MapItemType : uint16_t { // private
kDexTypeHeaderItem = 0x0000,
kDexTypeStringIdItem = 0x0001,
kDexTypeTypeIdItem = 0x0002,
@@ -111,6 +111,8 @@ class DexFile {
kDexTypeFieldIdItem = 0x0004,
kDexTypeMethodIdItem = 0x0005,
kDexTypeClassDefItem = 0x0006,
+ kDexTypeCallSiteIdItem = 0x0007,
+ kDexTypeMethodHandleItem = 0x0008,
kDexTypeMapList = 0x1000,
kDexTypeTypeList = 0x1001,
kDexTypeAnnotationSetRefList = 0x1002,
@@ -260,6 +262,37 @@ class DexFile {
DISALLOW_COPY_AND_ASSIGN(TypeList);
};
+ // MethodHandle Types
+ enum class MethodHandleType : uint16_t { // private
+ kStaticPut = 0x0000, // a setter for a given static field.
+ kStaticGet = 0x0001, // a getter for a given static field.
+ kInstancePut = 0x0002, // a setter for a given instance field.
+ kInstanceGet = 0x0003, // a getter for a given instance field.
+ kInvokeStatic = 0x0004, // an invoker for a given static method.
+ kInvokeInstance = 0x0005, // an invoker for a given instance method. This
+ // can be any non-static method on any class (or interface) except
+ // for "<init>".
+ kInvokeConstructor = 0x0006, // an invoker for a given constructor.
+ kLast = kInvokeConstructor
+ };
+
+ // raw method_handle_item
+ struct MethodHandleItem {
+ uint16_t method_handle_type_;
+ uint16_t reserved1_; // Reserved for future use.
+ uint16_t field_or_method_idx_; // Field index for accessors, method index otherwise.
+ uint16_t reserved2_; // Reserved for future use.
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MethodHandleItem);
+ };
+
+ // raw call_site_id_item
+ struct CallSiteIdItem {
+ uint32_t data_off_; // Offset into data section pointing to encoded array items.
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CallSiteIdItem);
+ };
+
// Raw code_item.
struct CodeItem {
uint16_t registers_size_; // the number of registers used by this code
@@ -302,6 +335,8 @@ class DexFile {
kDexAnnotationLong = 0x06,
kDexAnnotationFloat = 0x10,
kDexAnnotationDouble = 0x11,
+ kDexAnnotationMethodType = 0x15,
+ kDexAnnotationMethodHandle = 0x16,
kDexAnnotationString = 0x17,
kDexAnnotationType = 0x18,
kDexAnnotationField = 0x19,
@@ -389,11 +424,18 @@ class DexFile {
struct AnnotationValue;
- // Returns the checksum of a file for comparison with GetLocationChecksum().
- // For .dex files, this is the header checksum.
- // For zip files, this is the classes.dex zip entry CRC32 checksum.
- // Return true if the checksum could be found, false otherwise.
- static bool GetChecksum(const char* filename, uint32_t* checksum, std::string* error_msg);
+ // Returns the checksums of a file for comparison with GetLocationChecksum().
+ // For .dex files, this is the single header checksum.
+ // For zip files, this is the zip entry CRC32 checksum for classes.dex and
+ // each additional multidex entry classes2.dex, classes3.dex, etc.
+ // Return true if the checksums could be found, false otherwise.
+ static bool GetMultiDexChecksums(const char* filename,
+ std::vector<uint32_t>* checksums,
+ std::string* error_msg);
+
+ // Check whether a location denotes a multidex dex file. This is a very simple check: returns
+ // whether the string contains the separator character.
+ static bool IsMultiDexLocation(const char* location);
// Opens .dex file, backed by existing memory
static std::unique_ptr<const DexFile> Open(const uint8_t* base,
@@ -683,6 +725,24 @@ class DexFile {
}
}
+ uint32_t NumMethodHandles() const {
+ return num_method_handles_;
+ }
+
+ const MethodHandleItem& GetMethodHandle(uint32_t idx) const {
+ CHECK_LT(idx, NumMethodHandles());
+ return method_handles_[idx];
+ }
+
+ uint32_t NumCallSiteIds() const {
+ return num_call_site_ids_;
+ }
+
+ const CallSiteIdItem& GetCallSiteId(uint32_t idx) const {
+ CHECK_LT(idx, NumCallSiteIds());
+ return call_site_ids_[idx];
+ }
+
// Returns a pointer to the raw memory mapped class_data_item
const uint8_t* GetClassData(const ClassDef& class_def) const {
if (class_def.class_data_off_ == 0) {
@@ -761,6 +821,10 @@ class DexFile {
}
}
+ const uint8_t* GetCallSiteEncodedValuesArray(const CallSiteIdItem& call_site_id) const {
+ return begin_ + call_site_id.data_off_;
+ }
+
static const TryItem* GetTryItems(const CodeItem& code_item, uint32_t offset);
// Get the base of the encoded data for the given DexCode.
@@ -1101,9 +1165,8 @@ class DexFile {
// Returns true if the header magic and version numbers are of the expected values.
bool CheckMagicAndVersion(std::string* error_msg) const;
- // Check whether a location denotes a multidex dex file. This is a very simple check: returns
- // whether the string contains the separator character.
- static bool IsMultiDexLocation(const char* location);
+ // Initialize section info for sections only found in the map list.
+ void InitializeSectionsFromMapList();
// The base address of the memory mapping.
const uint8_t* const begin_;
@@ -1143,6 +1206,18 @@ class DexFile {
// Points to the base of the class definition list.
const ClassDef* const class_defs_;
+ // Points to the base of the method handles list.
+ const MethodHandleItem* method_handles_;
+
+ // Number of elements in the method handles list.
+ size_t num_method_handles_;
+
+ // Points to the base of the call sites id list.
+ const CallSiteIdItem* call_site_ids_;
+
+ // Number of elements in the call sites list.
+ size_t num_call_site_ids_;
+
// If this dex file was loaded from an oat file, oat_dex_file_ contains a
// pointer to the OatDexFile it was loaded from. Otherwise oat_dex_file_ is
// null.
@@ -1409,32 +1484,33 @@ class ClassDataItemIterator {
DISALLOW_IMPLICIT_CONSTRUCTORS(ClassDataItemIterator);
};
-class EncodedStaticFieldValueIterator {
+class EncodedArrayValueIterator {
public:
- EncodedStaticFieldValueIterator(const DexFile& dex_file,
- const DexFile::ClassDef& class_def);
+ EncodedArrayValueIterator(const DexFile& dex_file, const uint8_t* array_data);
bool HasNext() const { return pos_ < array_size_; }
void Next();
enum ValueType {
- kByte = 0x00,
- kShort = 0x02,
- kChar = 0x03,
- kInt = 0x04,
- kLong = 0x06,
- kFloat = 0x10,
- kDouble = 0x11,
- kString = 0x17,
- kType = 0x18,
- kField = 0x19,
- kMethod = 0x1a,
- kEnum = 0x1b,
- kArray = 0x1c,
- kAnnotation = 0x1d,
- kNull = 0x1e,
- kBoolean = 0x1f
+ kByte = 0x00,
+ kShort = 0x02,
+ kChar = 0x03,
+ kInt = 0x04,
+ kLong = 0x06,
+ kFloat = 0x10,
+ kDouble = 0x11,
+ kMethodType = 0x15,
+ kMethodHandle = 0x16,
+ kString = 0x17,
+ kType = 0x18,
+ kField = 0x19,
+ kMethod = 0x1a,
+ kEnum = 0x1b,
+ kArray = 0x1c,
+ kAnnotation = 0x1d,
+ kNull = 0x1e,
+ kBoolean = 0x1f,
};
ValueType GetValueType() const { return type_; }
@@ -1452,10 +1528,38 @@ class EncodedStaticFieldValueIterator {
jvalue jval_; // Value of current encoded value.
private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(EncodedArrayValueIterator);
+};
+std::ostream& operator<<(std::ostream& os, const EncodedArrayValueIterator::ValueType& code);
+
+class EncodedStaticFieldValueIterator : public EncodedArrayValueIterator {
+ public:
+ EncodedStaticFieldValueIterator(const DexFile& dex_file,
+ const DexFile::ClassDef& class_def)
+ : EncodedArrayValueIterator(dex_file,
+ dex_file.GetEncodedStaticFieldValuesArray(class_def))
+ {}
+
+ private:
DISALLOW_IMPLICIT_CONSTRUCTORS(EncodedStaticFieldValueIterator);
};
std::ostream& operator<<(std::ostream& os, const EncodedStaticFieldValueIterator::ValueType& code);
+class CallSiteArrayValueIterator : public EncodedArrayValueIterator {
+ public:
+ CallSiteArrayValueIterator(const DexFile& dex_file,
+ const DexFile::CallSiteIdItem& call_site_id)
+ : EncodedArrayValueIterator(dex_file,
+ dex_file.GetCallSiteEncodedValuesArray(call_site_id))
+ {}
+
+ uint32_t Size() const { return array_size_; }
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(CallSiteArrayValueIterator);
+};
+std::ostream& operator<<(std::ostream& os, const CallSiteArrayValueIterator::ValueType& code);
+
class CatchHandlerIterator {
public:
CatchHandlerIterator(const DexFile::CodeItem& code_item, uint32_t address);
diff --git a/runtime/dex_file_annotations.cc b/runtime/dex_file_annotations.cc
index 16a447b0a6..a95f94cabb 100644
--- a/runtime/dex_file_annotations.cc
+++ b/runtime/dex_file_annotations.cc
@@ -252,7 +252,7 @@ mirror::Object* ProcessEncodedAnnotation(Handle<mirror::Class> klass, const uint
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Handle<mirror::Class> annotation_class(hs.NewHandle(
class_linker->ResolveType(klass->GetDexFile(), dex::TypeIndex(type_index), klass.Get())));
- if (annotation_class.Get() == nullptr) {
+ if (annotation_class == nullptr) {
LOG(INFO) << "Unable to resolve " << klass->PrettyClass() << " annotation class " << type_index;
DCHECK(Thread::Current()->IsExceptionPending());
Thread::Current()->ClearException();
@@ -481,7 +481,7 @@ bool ProcessAnnotationValue(Handle<mirror::Class> klass,
break;
}
case DexFile::kDexAnnotationArray:
- if (result_style == DexFile::kAllRaw || array_class.Get() == nullptr) {
+ if (result_style == DexFile::kAllRaw || array_class == nullptr) {
return false;
} else {
ScopedObjectAccessUnchecked soa(self);
@@ -491,7 +491,7 @@ bool ProcessAnnotationValue(Handle<mirror::Class> klass,
Handle<mirror::Array> new_array(hs.NewHandle(mirror::Array::Alloc<true>(
self, array_class.Get(), size, array_class->GetComponentSizeShift(),
Runtime::Current()->GetHeap()->GetCurrentAllocator())));
- if (new_array.Get() == nullptr) {
+ if (new_array == nullptr) {
LOG(ERROR) << "Annotation element array allocation failed with size " << size;
return false;
}
@@ -631,8 +631,8 @@ mirror::Object* CreateAnnotationMember(Handle<mirror::Class> klass,
}
Handle<mirror::Method> method_object(hs.NewHandle(method_obj_ptr));
- if (new_member.Get() == nullptr || string_name.Get() == nullptr ||
- method_object.Get() == nullptr || method_return.Get() == nullptr) {
+ if (new_member == nullptr || string_name == nullptr ||
+ method_object == nullptr || method_return == nullptr) {
LOG(ERROR) << StringPrintf("Failed creating annotation element (m=%p n=%p a=%p r=%p",
new_member.Get(), string_name.Get(), method_object.Get(), method_return.Get());
return nullptr;
@@ -740,7 +740,7 @@ mirror::ObjectArray<mirror::String>* GetSignatureValue(Handle<mirror::Class> kla
ObjPtr<mirror::Class> string_class = mirror::String::GetJavaLangString();
Handle<mirror::Class> string_array_class(hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindArrayClass(Thread::Current(), &string_class)));
- if (string_array_class.Get() == nullptr) {
+ if (string_array_class == nullptr) {
return nullptr;
}
mirror::Object* obj =
@@ -766,7 +766,7 @@ mirror::ObjectArray<mirror::Class>* GetThrowsValue(Handle<mirror::Class> klass,
ObjPtr<mirror::Class> class_class = mirror::Class::GetJavaLangClass();
Handle<mirror::Class> class_array_class(hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindArrayClass(Thread::Current(), &class_class)));
- if (class_array_class.Get() == nullptr) {
+ if (class_array_class == nullptr) {
return nullptr;
}
mirror::Object* obj =
@@ -796,7 +796,7 @@ mirror::ObjectArray<mirror::Object>* ProcessAnnotationSet(
uint32_t size = annotation_set->size_;
Handle<mirror::ObjectArray<mirror::Object>> result(hs.NewHandle(
mirror::ObjectArray<mirror::Object>::Alloc(self, annotation_array_class.Get(), size)));
- if (result.Get() == nullptr) {
+ if (result == nullptr) {
return nullptr;
}
@@ -854,7 +854,7 @@ mirror::ObjectArray<mirror::Object>* ProcessAnnotationSetRefList(
}
Handle<mirror::ObjectArray<mirror::Object>> annotation_array_array(hs.NewHandle(
mirror::ObjectArray<mirror::Object>::Alloc(self, annotation_array_array_class, size)));
- if (annotation_array_array.Get() == nullptr) {
+ if (annotation_array_array == nullptr) {
LOG(ERROR) << "Annotation set ref array allocation failed";
return nullptr;
}
@@ -1056,7 +1056,7 @@ bool GetParametersMetadataForMethod(ArtMethod* method,
ObjPtr<mirror::Class> string_class = mirror::String::GetJavaLangString();
Handle<mirror::Class> string_array_class(hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindArrayClass(Thread::Current(), &string_class)));
- if (UNLIKELY(string_array_class.Get() == nullptr)) {
+ if (UNLIKELY(string_array_class == nullptr)) {
return false;
}
@@ -1067,13 +1067,13 @@ bool GetParametersMetadataForMethod(ArtMethod* method,
"names",
string_array_class,
DexFile::kDexAnnotationArray));
- if (names_obj.Get() == nullptr) {
+ if (names_obj == nullptr) {
return false;
}
// Extract the parameters' access flags int[].
Handle<mirror::Class> int_array_class(hs.NewHandle(mirror::IntArray::GetArrayClass()));
- if (UNLIKELY(int_array_class.Get() == nullptr)) {
+ if (UNLIKELY(int_array_class == nullptr)) {
return false;
}
Handle<mirror::Object> access_flags_obj =
@@ -1082,7 +1082,7 @@ bool GetParametersMetadataForMethod(ArtMethod* method,
"accessFlags",
int_array_class,
DexFile::kDexAnnotationArray));
- if (access_flags_obj.Get() == nullptr) {
+ if (access_flags_obj == nullptr) {
return false;
}
@@ -1146,7 +1146,7 @@ mirror::ObjectArray<mirror::Class>* GetDeclaredClasses(Handle<mirror::Class> kla
ObjPtr<mirror::Class> class_class = mirror::Class::GetJavaLangClass();
Handle<mirror::Class> class_array_class(hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindArrayClass(hs.Self(), &class_class)));
- if (class_array_class.Get() == nullptr) {
+ if (class_array_class == nullptr) {
return nullptr;
}
mirror::Object* obj =
diff --git a/runtime/dex_file_test.cc b/runtime/dex_file_test.cc
index 9dca4c0621..9131715fec 100644
--- a/runtime/dex_file_test.cc
+++ b/runtime/dex_file_test.cc
@@ -326,12 +326,32 @@ TEST_F(DexFileTest, GetLocationChecksum) {
}
TEST_F(DexFileTest, GetChecksum) {
- uint32_t checksum;
+ std::vector<uint32_t> checksums;
ScopedObjectAccess soa(Thread::Current());
std::string error_msg;
- EXPECT_TRUE(DexFile::GetChecksum(GetLibCoreDexFileNames()[0].c_str(), &checksum, &error_msg))
+ EXPECT_TRUE(DexFile::GetMultiDexChecksums(GetLibCoreDexFileNames()[0].c_str(), &checksums, &error_msg))
<< error_msg;
- EXPECT_EQ(java_lang_dex_file_->GetLocationChecksum(), checksum);
+ ASSERT_EQ(1U, checksums.size());
+ EXPECT_EQ(java_lang_dex_file_->GetLocationChecksum(), checksums[0]);
+}
+
+TEST_F(DexFileTest, GetMultiDexChecksums) {
+ std::string error_msg;
+ std::vector<uint32_t> checksums;
+ std::string multidex_file = GetTestDexFileName("MultiDex");
+ EXPECT_TRUE(DexFile::GetMultiDexChecksums(multidex_file.c_str(),
+ &checksums,
+ &error_msg)) << error_msg;
+
+ std::vector<std::unique_ptr<const DexFile>> dexes = OpenTestDexFiles("MultiDex");
+ ASSERT_EQ(2U, dexes.size());
+ ASSERT_EQ(2U, checksums.size());
+
+ EXPECT_EQ(dexes[0]->GetLocation(), DexFile::GetMultiDexLocation(0, multidex_file.c_str()));
+ EXPECT_EQ(dexes[0]->GetLocationChecksum(), checksums[0]);
+
+ EXPECT_EQ(dexes[1]->GetLocation(), DexFile::GetMultiDexLocation(1, multidex_file.c_str()));
+ EXPECT_EQ(dexes[1]->GetLocationChecksum(), checksums[1]);
}
TEST_F(DexFileTest, ClassDefs) {
diff --git a/runtime/dex_file_verifier.cc b/runtime/dex_file_verifier.cc
index 318123edcd..0b3f16a3cb 100644
--- a/runtime/dex_file_verifier.cc
+++ b/runtime/dex_file_verifier.cc
@@ -46,8 +46,8 @@ static bool IsValidTypeId(uint16_t low ATTRIBUTE_UNUSED, uint16_t high) {
return (high == 0);
}
-static uint32_t MapTypeToBitMask(uint32_t map_type) {
- switch (map_type) {
+static uint32_t MapTypeToBitMask(DexFile::MapItemType map_item_type) {
+ switch (map_item_type) {
case DexFile::kDexTypeHeaderItem: return 1 << 0;
case DexFile::kDexTypeStringIdItem: return 1 << 1;
case DexFile::kDexTypeTypeIdItem: return 1 << 2;
@@ -55,23 +55,25 @@ static uint32_t MapTypeToBitMask(uint32_t map_type) {
case DexFile::kDexTypeFieldIdItem: return 1 << 4;
case DexFile::kDexTypeMethodIdItem: return 1 << 5;
case DexFile::kDexTypeClassDefItem: return 1 << 6;
- case DexFile::kDexTypeMapList: return 1 << 7;
- case DexFile::kDexTypeTypeList: return 1 << 8;
- case DexFile::kDexTypeAnnotationSetRefList: return 1 << 9;
- case DexFile::kDexTypeAnnotationSetItem: return 1 << 10;
- case DexFile::kDexTypeClassDataItem: return 1 << 11;
- case DexFile::kDexTypeCodeItem: return 1 << 12;
- case DexFile::kDexTypeStringDataItem: return 1 << 13;
- case DexFile::kDexTypeDebugInfoItem: return 1 << 14;
- case DexFile::kDexTypeAnnotationItem: return 1 << 15;
- case DexFile::kDexTypeEncodedArrayItem: return 1 << 16;
- case DexFile::kDexTypeAnnotationsDirectoryItem: return 1 << 17;
+ case DexFile::kDexTypeCallSiteIdItem: return 1 << 7;
+ case DexFile::kDexTypeMethodHandleItem: return 1 << 8;
+ case DexFile::kDexTypeMapList: return 1 << 9;
+ case DexFile::kDexTypeTypeList: return 1 << 10;
+ case DexFile::kDexTypeAnnotationSetRefList: return 1 << 11;
+ case DexFile::kDexTypeAnnotationSetItem: return 1 << 12;
+ case DexFile::kDexTypeClassDataItem: return 1 << 13;
+ case DexFile::kDexTypeCodeItem: return 1 << 14;
+ case DexFile::kDexTypeStringDataItem: return 1 << 15;
+ case DexFile::kDexTypeDebugInfoItem: return 1 << 16;
+ case DexFile::kDexTypeAnnotationItem: return 1 << 17;
+ case DexFile::kDexTypeEncodedArrayItem: return 1 << 18;
+ case DexFile::kDexTypeAnnotationsDirectoryItem: return 1 << 19;
}
return 0;
}
-static bool IsDataSectionType(uint32_t map_type) {
- switch (map_type) {
+static bool IsDataSectionType(DexFile::MapItemType map_item_type) {
+ switch (map_item_type) {
case DexFile::kDexTypeHeaderItem:
case DexFile::kDexTypeStringIdItem:
case DexFile::kDexTypeTypeIdItem:
@@ -80,6 +82,20 @@ static bool IsDataSectionType(uint32_t map_type) {
case DexFile::kDexTypeMethodIdItem:
case DexFile::kDexTypeClassDefItem:
return false;
+ case DexFile::kDexTypeCallSiteIdItem:
+ case DexFile::kDexTypeMethodHandleItem:
+ case DexFile::kDexTypeMapList:
+ case DexFile::kDexTypeTypeList:
+ case DexFile::kDexTypeAnnotationSetRefList:
+ case DexFile::kDexTypeAnnotationSetItem:
+ case DexFile::kDexTypeClassDataItem:
+ case DexFile::kDexTypeCodeItem:
+ case DexFile::kDexTypeStringDataItem:
+ case DexFile::kDexTypeDebugInfoItem:
+ case DexFile::kDexTypeAnnotationItem:
+ case DexFile::kDexTypeEncodedArrayItem:
+ case DexFile::kDexTypeAnnotationsDirectoryItem:
+ return true;
}
return true;
}
@@ -455,7 +471,8 @@ bool DexFileVerifier::CheckMap() {
return false;
}
- if (IsDataSectionType(item->type_)) {
+ DexFile::MapItemType item_type = static_cast<DexFile::MapItemType>(item->type_);
+ if (IsDataSectionType(item_type)) {
uint32_t icount = item->size_;
if (UNLIKELY(icount > data_items_left)) {
ErrorStringPrintf("Too many items in data section: %ud", data_item_count + icount);
@@ -465,7 +482,7 @@ bool DexFileVerifier::CheckMap() {
data_item_count += icount;
}
- uint32_t bit = MapTypeToBitMask(item->type_);
+ uint32_t bit = MapTypeToBitMask(item_type);
if (UNLIKELY(bit == 0)) {
ErrorStringPrintf("Unknown map section type %x", item->type_);
@@ -837,6 +854,28 @@ bool DexFileVerifier::CheckEncodedValue() {
return false;
}
break;
+ case DexFile::kDexAnnotationMethodType: {
+ if (UNLIKELY(value_arg > 3)) {
+ ErrorStringPrintf("Bad encoded_value method type size %x", value_arg);
+ return false;
+ }
+ uint32_t idx = ReadUnsignedLittleEndian(value_arg + 1);
+ if (!CheckIndex(idx, header_->proto_ids_size_, "method_type value")) {
+ return false;
+ }
+ break;
+ }
+ case DexFile::kDexAnnotationMethodHandle: {
+ if (UNLIKELY(value_arg > 3)) {
+ ErrorStringPrintf("Bad encoded_value method handle size %x", value_arg);
+ return false;
+ }
+ uint32_t idx = ReadUnsignedLittleEndian(value_arg + 1);
+ if (!CheckIndex(idx, dex_file_->NumMethodHandles(), "method_handle value")) {
+ return false;
+ }
+ break;
+ }
default:
ErrorStringPrintf("Bogus encoded_value value_type %x", value_type);
return false;
@@ -1455,7 +1494,7 @@ bool DexFileVerifier::CheckIntraAnnotationsDirectoryItem() {
}
bool DexFileVerifier::CheckIntraSectionIterate(size_t offset, uint32_t section_count,
- uint16_t type) {
+ DexFile::MapItemType type) {
// Get the right alignment mask for the type of section.
size_t alignment_mask;
switch (type) {
@@ -1481,6 +1520,7 @@ bool DexFileVerifier::CheckIntraSectionIterate(size_t offset, uint32_t section_c
}
// Check depending on the section type.
+ const uint8_t* start_ptr = ptr_;
switch (type) {
case DexFile::kDexTypeStringIdItem: {
if (!CheckListSize(ptr_, 1, sizeof(DexFile::StringId), "string_ids")) {
@@ -1524,6 +1564,20 @@ bool DexFileVerifier::CheckIntraSectionIterate(size_t offset, uint32_t section_c
ptr_ += sizeof(DexFile::ClassDef);
break;
}
+ case DexFile::kDexTypeCallSiteIdItem: {
+ if (!CheckListSize(ptr_, 1, sizeof(DexFile::CallSiteIdItem), "call_site_ids")) {
+ return false;
+ }
+ ptr_ += sizeof(DexFile::CallSiteIdItem);
+ break;
+ }
+ case DexFile::kDexTypeMethodHandleItem: {
+ if (!CheckListSize(ptr_, 1, sizeof(DexFile::MethodHandleItem), "method_handles")) {
+ return false;
+ }
+ ptr_ += sizeof(DexFile::MethodHandleItem);
+ break;
+ }
case DexFile::kDexTypeTypeList: {
if (!CheckList(sizeof(DexFile::TypeItem), "type_list", &ptr_)) {
return false;
@@ -1584,9 +1638,14 @@ bool DexFileVerifier::CheckIntraSectionIterate(size_t offset, uint32_t section_c
}
break;
}
- default:
- ErrorStringPrintf("Unknown map item type %x", type);
- return false;
+ case DexFile::kDexTypeHeaderItem:
+ case DexFile::kDexTypeMapList:
+ break;
+ }
+
+ if (start_ptr == ptr_) {
+ ErrorStringPrintf("Unknown map item type %x", type);
+ return false;
}
if (IsDataSectionType(type)) {
@@ -1610,7 +1669,9 @@ bool DexFileVerifier::CheckIntraSectionIterate(size_t offset, uint32_t section_c
return true;
}
-bool DexFileVerifier::CheckIntraIdSection(size_t offset, uint32_t count, uint16_t type) {
+bool DexFileVerifier::CheckIntraIdSection(size_t offset,
+ uint32_t count,
+ DexFile::MapItemType type) {
uint32_t expected_offset;
uint32_t expected_size;
@@ -1658,7 +1719,9 @@ bool DexFileVerifier::CheckIntraIdSection(size_t offset, uint32_t count, uint16_
return CheckIntraSectionIterate(offset, count, type);
}
-bool DexFileVerifier::CheckIntraDataSection(size_t offset, uint32_t count, uint16_t type) {
+bool DexFileVerifier::CheckIntraDataSection(size_t offset,
+ uint32_t count,
+ DexFile::MapItemType type) {
size_t data_start = header_->data_off_;
size_t data_end = data_start + header_->data_size_;
@@ -1684,16 +1747,16 @@ bool DexFileVerifier::CheckIntraDataSection(size_t offset, uint32_t count, uint1
bool DexFileVerifier::CheckIntraSection() {
const DexFile::MapList* map = reinterpret_cast<const DexFile::MapList*>(begin_ + header_->map_off_);
const DexFile::MapItem* item = map->list_;
-
- uint32_t count = map->size_;
size_t offset = 0;
+ uint32_t count = map->size_;
ptr_ = begin_;
// Check the items listed in the map.
while (count--) {
+ const size_t current_offset = offset;
uint32_t section_offset = item->offset_;
uint32_t section_count = item->size_;
- uint16_t type = item->type_;
+ DexFile::MapItemType type = static_cast<DexFile::MapItemType>(item->type_);
// Check for padding and overlap between items.
if (!CheckPadding(offset, section_offset)) {
@@ -1741,6 +1804,11 @@ bool DexFileVerifier::CheckIntraSection() {
ptr_ += sizeof(uint32_t) + (map->size_ * sizeof(DexFile::MapItem));
offset = section_offset + sizeof(uint32_t) + (map->size_ * sizeof(DexFile::MapItem));
break;
+ case DexFile::kDexTypeMethodHandleItem:
+ case DexFile::kDexTypeCallSiteIdItem:
+ CheckIntraSectionIterate(section_offset, section_count, type);
+ offset = ptr_ - begin_;
+ break;
case DexFile::kDexTypeTypeList:
case DexFile::kDexTypeAnnotationSetRefList:
case DexFile::kDexTypeAnnotationSetItem:
@@ -1756,7 +1824,9 @@ bool DexFileVerifier::CheckIntraSection() {
}
offset = ptr_ - begin_;
break;
- default:
+ }
+
+ if (offset == current_offset) {
ErrorStringPrintf("Unknown map item type %x", type);
return false;
}
@@ -2237,6 +2307,92 @@ bool DexFileVerifier::CheckInterClassDefItem() {
return true;
}
+bool DexFileVerifier::CheckInterCallSiteIdItem() {
+ const DexFile::CallSiteIdItem* item = reinterpret_cast<const DexFile::CallSiteIdItem*>(ptr_);
+
+ // Check call site referenced by item is in encoded array section.
+ if (!CheckOffsetToTypeMap(item->data_off_, DexFile::kDexTypeEncodedArrayItem)) {
+ ErrorStringPrintf("Invalid offset in CallSideIdItem");
+ return false;
+ }
+
+ CallSiteArrayValueIterator it(*dex_file_, *item);
+
+ // Check Method Handle
+ if (!it.HasNext() || it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodHandle) {
+ ErrorStringPrintf("CallSiteArray missing method handle");
+ return false;
+ }
+
+ uint32_t handle_index = static_cast<uint32_t>(it.GetJavaValue().i);
+ if (handle_index >= dex_file_->NumMethodHandles()) {
+ ErrorStringPrintf("CallSite has bad method handle id: %x", handle_index);
+ return false;
+ }
+
+ // Check target method name.
+ it.Next();
+ if (!it.HasNext() ||
+ it.GetValueType() != EncodedArrayValueIterator::ValueType::kString) {
+ ErrorStringPrintf("CallSiteArray missing target method name");
+ return false;
+ }
+
+ uint32_t name_index = static_cast<uint32_t>(it.GetJavaValue().i);
+ if (name_index >= dex_file_->NumStringIds()) {
+ ErrorStringPrintf("CallSite has bad method name id: %x", name_index);
+ return false;
+ }
+
+ // Check method type.
+ it.Next();
+ if (!it.HasNext() ||
+ it.GetValueType() != EncodedArrayValueIterator::ValueType::kMethodType) {
+ ErrorStringPrintf("CallSiteArray missing method type");
+ return false;
+ }
+
+ uint32_t proto_index = static_cast<uint32_t>(it.GetJavaValue().i);
+ if (proto_index >= dex_file_->NumProtoIds()) {
+ ErrorStringPrintf("CallSite has bad method type: %x", proto_index);
+ return false;
+ }
+
+ ptr_ += sizeof(DexFile::CallSiteIdItem);
+ return true;
+}
+
+bool DexFileVerifier::CheckInterMethodHandleItem() {
+ const DexFile::MethodHandleItem* item = reinterpret_cast<const DexFile::MethodHandleItem*>(ptr_);
+
+ DexFile::MethodHandleType method_handle_type =
+ static_cast<DexFile::MethodHandleType>(item->method_handle_type_);
+ if (method_handle_type > DexFile::MethodHandleType::kLast) {
+ ErrorStringPrintf("Bad method handle type %x", item->method_handle_type_);
+ return false;
+ }
+
+ uint32_t index = item->field_or_method_idx_;
+ switch (method_handle_type) {
+ case DexFile::MethodHandleType::kStaticPut:
+ case DexFile::MethodHandleType::kStaticGet:
+ case DexFile::MethodHandleType::kInstancePut:
+ case DexFile::MethodHandleType::kInstanceGet: {
+ LOAD_FIELD(field, index, "method_handle_item field_idx", return false);
+ break;
+ }
+ case DexFile::MethodHandleType::kInvokeStatic:
+ case DexFile::MethodHandleType::kInvokeInstance:
+ case DexFile::MethodHandleType::kInvokeConstructor: {
+ LOAD_METHOD(method, index, "method_handle_item method_idx", return false);
+ break;
+ }
+ }
+
+ ptr_ += sizeof(DexFile::MethodHandleItem);
+ return true;
+}
+
bool DexFileVerifier::CheckInterAnnotationSetRefList() {
const DexFile::AnnotationSetRefList* list =
reinterpret_cast<const DexFile::AnnotationSetRefList*>(ptr_);
@@ -2386,7 +2542,9 @@ bool DexFileVerifier::CheckInterAnnotationsDirectoryItem() {
return true;
}
-bool DexFileVerifier::CheckInterSectionIterate(size_t offset, uint32_t count, uint16_t type) {
+bool DexFileVerifier::CheckInterSectionIterate(size_t offset,
+ uint32_t count,
+ DexFile::MapItemType type) {
// Get the right alignment mask for the type of section.
size_t alignment_mask;
switch (type) {
@@ -2405,8 +2563,22 @@ bool DexFileVerifier::CheckInterSectionIterate(size_t offset, uint32_t count, ui
ptr_ = begin_ + new_offset;
const uint8_t* prev_ptr = ptr_;
+ if (MapTypeToBitMask(type) == 0) {
+ ErrorStringPrintf("Unknown map item type %x", type);
+ return false;
+ }
+
// Check depending on the section type.
switch (type) {
+ case DexFile::kDexTypeHeaderItem:
+ case DexFile::kDexTypeMapList:
+ case DexFile::kDexTypeTypeList:
+ case DexFile::kDexTypeCodeItem:
+ case DexFile::kDexTypeStringDataItem:
+ case DexFile::kDexTypeDebugInfoItem:
+ case DexFile::kDexTypeAnnotationItem:
+ case DexFile::kDexTypeEncodedArrayItem:
+ break;
case DexFile::kDexTypeStringIdItem: {
if (!CheckInterStringIdItem()) {
return false;
@@ -2451,6 +2623,18 @@ bool DexFileVerifier::CheckInterSectionIterate(size_t offset, uint32_t count, ui
}
break;
}
+ case DexFile::kDexTypeCallSiteIdItem: {
+ if (!CheckInterCallSiteIdItem()) {
+ return false;
+ }
+ break;
+ }
+ case DexFile::kDexTypeMethodHandleItem: {
+ if (!CheckInterMethodHandleItem()) {
+ return false;
+ }
+ break;
+ }
case DexFile::kDexTypeAnnotationSetRefList: {
if (!CheckInterAnnotationSetRefList()) {
return false;
@@ -2483,9 +2667,6 @@ bool DexFileVerifier::CheckInterSectionIterate(size_t offset, uint32_t count, ui
}
break;
}
- default:
- ErrorStringPrintf("Unknown map item type %x", type);
- return false;
}
previous_item_ = prev_ptr;
@@ -2504,7 +2685,8 @@ bool DexFileVerifier::CheckInterSection() {
while (count--) {
uint32_t section_offset = item->offset_;
uint32_t section_count = item->size_;
- uint16_t type = item->type_;
+ DexFile::MapItemType type = static_cast<DexFile::MapItemType>(item->type_);
+ bool found = false;
switch (type) {
case DexFile::kDexTypeHeaderItem:
@@ -2515,6 +2697,7 @@ bool DexFileVerifier::CheckInterSection() {
case DexFile::kDexTypeDebugInfoItem:
case DexFile::kDexTypeAnnotationItem:
case DexFile::kDexTypeEncodedArrayItem:
+ found = true;
break;
case DexFile::kDexTypeStringIdItem:
case DexFile::kDexTypeTypeIdItem:
@@ -2522,6 +2705,8 @@ bool DexFileVerifier::CheckInterSection() {
case DexFile::kDexTypeFieldIdItem:
case DexFile::kDexTypeMethodIdItem:
case DexFile::kDexTypeClassDefItem:
+ case DexFile::kDexTypeCallSiteIdItem:
+ case DexFile::kDexTypeMethodHandleItem:
case DexFile::kDexTypeAnnotationSetRefList:
case DexFile::kDexTypeAnnotationSetItem:
case DexFile::kDexTypeClassDataItem:
@@ -2529,11 +2714,14 @@ bool DexFileVerifier::CheckInterSection() {
if (!CheckInterSectionIterate(section_offset, section_count, type)) {
return false;
}
+ found = true;
break;
}
- default:
- ErrorStringPrintf("Unknown map item type %x", type);
- return false;
+ }
+
+ if (!found) {
+ ErrorStringPrintf("Unknown map item type %x", item->type_);
+ return false;
}
item++;
diff --git a/runtime/dex_file_verifier.h b/runtime/dex_file_verifier.h
index ae206132dd..71b316c403 100644
--- a/runtime/dex_file_verifier.h
+++ b/runtime/dex_file_verifier.h
@@ -122,9 +122,9 @@ class DexFileVerifier {
bool CheckIntraAnnotationItem();
bool CheckIntraAnnotationsDirectoryItem();
- bool CheckIntraSectionIterate(size_t offset, uint32_t count, uint16_t type);
- bool CheckIntraIdSection(size_t offset, uint32_t count, uint16_t type);
- bool CheckIntraDataSection(size_t offset, uint32_t count, uint16_t type);
+ bool CheckIntraSectionIterate(size_t offset, uint32_t count, DexFile::MapItemType type);
+ bool CheckIntraIdSection(size_t offset, uint32_t count, DexFile::MapItemType type);
+ bool CheckIntraDataSection(size_t offset, uint32_t count, DexFile::MapItemType type);
bool CheckIntraSection();
bool CheckOffsetToTypeMap(size_t offset, uint16_t type);
@@ -140,12 +140,14 @@ class DexFileVerifier {
bool CheckInterFieldIdItem();
bool CheckInterMethodIdItem();
bool CheckInterClassDefItem();
+ bool CheckInterCallSiteIdItem();
+ bool CheckInterMethodHandleItem();
bool CheckInterAnnotationSetRefList();
bool CheckInterAnnotationSetItem();
bool CheckInterClassDataItem();
bool CheckInterAnnotationsDirectoryItem();
- bool CheckInterSectionIterate(size_t offset, uint32_t count, uint16_t type);
+ bool CheckInterSectionIterate(size_t offset, uint32_t count, DexFile::MapItemType type);
bool CheckInterSection();
// Load a string by (type) index. Checks whether the index is in bounds, printing the error if
diff --git a/runtime/dex_file_verifier_test.cc b/runtime/dex_file_verifier_test.cc
index c56b20057d..7736f3d615 100644
--- a/runtime/dex_file_verifier_test.cc
+++ b/runtime/dex_file_verifier_test.cc
@@ -1885,4 +1885,209 @@ TEST_F(DexFileVerifierTest, BadInitSignature) {
&error_msg));
}
+static const char* kInvokeCustomDexFiles[] = {
+ // TODO(oth): Revisit this test when we have smali / dx support.
+ // https://cs.corp.google.com/android/toolchain/jack/jack-tests/tests/com/android/jack/java7/invokecustom/test001/Tests.java
+ "ZGV4CjAzOAAEj12s/acmmdGuDL92SWSBh6iLBjxgomWkCAAAcAAAAHhWNBIAAAAAAAAAALwHAAAx"
+ "AAAAcAAAABYAAAA0AQAACQAAAIwBAAADAAAA+AEAAAsAAAAQAgAAAQAAAHACAAAMBgAAmAIAAMID"
+ "AADKAwAAzQMAANIDAADhAwAA5AMAAOoDAAAfBAAAUgQAAIMEAAC4BAAA1AQAAOsEAAD+BAAAEgUA"
+ "ACYFAAA6BQAAUQUAAG4FAACTBQAAtAUAAN0FAAD/BQAAHgYAADgGAABKBgAAVgYAAFkGAABdBgAA"
+ "YgYAAGYGAAB7BgAAgAYAAI8GAACdBgAAtAYAAMMGAADSBgAA3gYAAPIGAAD4BgAABgcAAA4HAAAU"
+ "BwAAGgcAAB8HAAAoBwAANAcAADoHAAABAAAABgAAAAcAAAAIAAAACQAAAAoAAAALAAAADAAAAA0A"
+ "AAAOAAAADwAAABAAAAARAAAAEgAAABMAAAAUAAAAFQAAABYAAAAXAAAAGAAAABoAAAAeAAAAAgAA"
+ "AAAAAACMAwAABQAAAAwAAACUAwAABQAAAA4AAACgAwAABAAAAA8AAAAAAAAAGgAAABQAAAAAAAAA"
+ "GwAAABQAAACsAwAAHAAAABQAAACMAwAAHQAAABQAAAC0AwAAHQAAABQAAAC8AwAAAwADAAMAAAAE"
+ "AAwAJAAAAAoABgAsAAAABAAEAAAAAAAEAAAAHwAAAAQAAQAoAAAABAAIACoAAAAEAAQALwAAAAYA"
+ "BQAtAAAACAAEAAAAAAANAAcAAAAAAA8AAgAlAAAAEAADACkAAAASAAYAIQAAAJYHAACWBwAABAAA"
+ "AAEAAAAIAAAAAAAAABkAAABkAwAAnQcAAAAAAAAEAAAAAgAAAAEAAABjBwAAAQAAAIsHAAACAAAA"
+ "iwcAAJMHAAABAAEAAQAAAEEHAAAEAAAAcBAGAAAADgADAAIAAAAAAEYHAAADAAAAkAABAg8AAAAF"
+ "AAMABAAAAE0HAAAQAAAAcQAJAAAADAAcAQQAbkAIABBDDAAiAQ0AcCAHAAEAEQEEAAEAAgAAAFYH"
+ "AAAMAAAAYgACABIhEjL8IAAAIQAKAW4gBQAQAA4AAwABAAIAAABdBwAACwAAABIgEjH8IAEAEAAK"
+ "ABJRcSAKAAEADgAAAAAAAAAAAAAAAwAAAAAAAAABAAAAmAIAAAIAAACgAgAABAAAAKgCAAACAAAA"
+ "AAAAAAMAAAAPAAkAEQAAAAMAAAAHAAkAEQAAAAEAAAAAAAAAAQAAAA4AAAABAAAAFQAGPGluaXQ+"
+ "AAFJAANJSUkADUlOVk9LRV9TVEFUSUMAAUwABExMTEwAM0xjb20vYW5kcm9pZC9qYWNrL2Fubm90"
+ "YXRpb25zL0NhbGxlZEJ5SW52b2tlQ3VzdG9tOwAxTGNvbS9hbmRyb2lkL2phY2svYW5ub3RhdGlv"
+ "bnMvTGlua2VyTWV0aG9kSGFuZGxlOwAvTGNvbS9hbmRyb2lkL2phY2svYW5ub3RhdGlvbnMvTWV0"
+ "aG9kSGFuZGxlS2luZDsAM0xjb20vYW5kcm9pZC9qYWNrL2phdmE3L2ludm9rZWN1c3RvbS90ZXN0"
+ "MDAxL1Rlc3RzOwAaTGRhbHZpay9hbm5vdGF0aW9uL1Rocm93czsAFUxqYXZhL2lvL1ByaW50U3Ry"
+ "ZWFtOwARTGphdmEvbGFuZy9DbGFzczsAEkxqYXZhL2xhbmcvT2JqZWN0OwASTGphdmEvbGFuZy9T"
+ "dHJpbmc7ABJMamF2YS9sYW5nL1N5c3RlbTsAFUxqYXZhL2xhbmcvVGhyb3dhYmxlOwAbTGphdmEv"
+ "bGFuZy9pbnZva2UvQ2FsbFNpdGU7ACNMamF2YS9sYW5nL2ludm9rZS9Db25zdGFudENhbGxTaXRl"
+ "OwAfTGphdmEvbGFuZy9pbnZva2UvTWV0aG9kSGFuZGxlOwAnTGphdmEvbGFuZy9pbnZva2UvTWV0"
+ "aG9kSGFuZGxlcyRMb29rdXA7ACBMamF2YS9sYW5nL2ludm9rZS9NZXRob2RIYW5kbGVzOwAdTGph"
+ "dmEvbGFuZy9pbnZva2UvTWV0aG9kVHlwZTsAGExqdW5pdC9mcmFtZXdvcmsvQXNzZXJ0OwAQTG9y"
+ "Zy9qdW5pdC9UZXN0OwAKVGVzdHMuamF2YQABVgACVkkAA1ZJSQACVkwAE1tMamF2YS9sYW5nL1N0"
+ "cmluZzsAA2FkZAANYXJndW1lbnRUeXBlcwAMYXNzZXJ0RXF1YWxzABVlbWl0dGVyOiBqYWNrLTQu"
+ "MC1lbmcADWVuY2xvc2luZ1R5cGUADWZpZWxkQ2FsbFNpdGUACmZpbmRTdGF0aWMAEmludm9rZU1l"
+ "dGhvZEhhbmRsZQAEa2luZAAMbGlua2VyTWV0aG9kAAZsb29rdXAABG1haW4ABG5hbWUAA291dAAH"
+ "cHJpbnRsbgAKcmV0dXJuVHlwZQAEdGVzdAAFdmFsdWUAIgAHDgAvAgAABw4ANQMAAAAHDqUAPwEA"
+ "Bw60ADsABw6lAAABBCAcAhgAGAAmHAEdAgQgHAMYDxgJGBEjGAQnGwArFygrFx8uGAACBQEwHAEY"
+ "CwETAAMWABcfFQABAAQBAQkAgYAEtAUBCswFAQrkBQEJlAYEAbwGAAAAEwAAAAAAAAABAAAAAAAA"
+ "AAEAAAAxAAAAcAAAAAIAAAAWAAAANAEAAAMAAAAJAAAAjAEAAAQAAAADAAAA+AEAAAUAAAALAAAA"
+ "EAIAAAcAAAACAAAAaAIAAAYAAAABAAAAcAIAAAgAAAABAAAAkAIAAAMQAAADAAAAmAIAAAEgAAAF"
+ "AAAAtAIAAAYgAAABAAAAZAMAAAEQAAAGAAAAjAMAAAIgAAAxAAAAwgMAAAMgAAAFAAAAQQcAAAQg"
+ "AAADAAAAYwcAAAUgAAABAAAAlgcAAAAgAAABAAAAnQcAAAAQAAABAAAAvAcAAA==",
+ // https://cs.corp.google.com/android/toolchain/jack/jack-tests/tests/com/android/jack/java7/invokecustom/test002/Tests.java
+ "ZGV4CjAzOAAzq3aGAwKhT4QQj4lqNfZJAO8Tm24uTyNICQAAcAAAAHhWNBIAAAAAAAAAAGAIAAA2"
+ "AAAAcAAAABgAAABIAQAACQAAAKgBAAAEAAAAFAIAAA0AAAA0AgAAAQAAAKQCAAB8BgAAzAIAACYE"
+ "AAAwBAAAOAQAAEQEAABHBAAATAQAAE8EAABVBAAAigQAALwEAADtBAAAIgUAAD4FAABVBQAAaAUA"
+ "AH0FAACRBQAApQUAALkFAADQBQAA7QUAABIGAAAzBgAAXAYAAH4GAACdBgAAtwYAAMkGAADPBgAA"
+ "2wYAAN4GAADiBgAA5wYAAOsGAAD/BgAAFAcAABkHAAAoBwAANgcAAE0HAABcBwAAawcAAH4HAACK"
+ "BwAAkAcAAJgHAACeBwAAqgcAALAHAAC1BwAAxgcAAM8HAADbBwAA4QcAAAMAAAAHAAAACAAAAAkA"
+ "AAAKAAAACwAAAAwAAAANAAAADgAAAA8AAAAQAAAAEQAAABIAAAATAAAAFAAAABUAAAAWAAAAFwAA"
+ "ABgAAAAZAAAAGgAAAB0AAAAhAAAAIgAAAAQAAAAAAAAA8AMAAAYAAAAPAAAA+AMAAAUAAAAQAAAA"
+ "AAAAAAYAAAASAAAABAQAAB0AAAAVAAAAAAAAAB4AAAAVAAAAEAQAAB8AAAAVAAAA8AMAACAAAAAV"
+ "AAAAGAQAACAAAAAVAAAAIAQAAAMAAwACAAAABAANACgAAAAIAAcAGwAAAAsABgAwAAAABAAEAAAA"
+ "AAAEAAQAAQAAAAQAAAAjAAAABAAIAC0AAAAEAAQANAAAAAYABQAyAAAACQAEAAEAAAAMAAQAMQAA"
+ "AA4ABwABAAAAEAABACoAAAARAAIALAAAABIAAwAuAAAAEwAGACUAAAA4CAAAOAgAAAQAAAABAAAA"
+ "CQAAAAAAAAAcAAAA0AMAAD8IAAAAAAAAAQAAAAEAAAABAAAADggAAAIAAAAtCAAANQgAAAgAAAAE"
+ "AAEA6AcAACoAAABxAAoAAAAMABwBBAAbAiMAAABiAwIAYgQCABIVI1UWAGIGAgASB00GBQdxMAsA"
+ "QwUMA25ACQAQMgwAIgEOAHAgCAABAGkBAQAOAA0AbhAHAAAAKPsAAAAAJAABAAEBDCUBAAEAAQAA"
+ "APUHAAAEAAAAcBAGAAAADgADAAIAAAAAAPoHAAADAAAAkAABAg8AAAAEAAEAAgAAAAEIAAAMAAAA"
+ "YgADABIhEjL8IAAAIQAKAW4gBQAQAA4AAwABAAIAAAAICAAACwAAABIgEjH8IAEAEAAKABJRcSAM"
+ "AAEADgAAAAAAAAAAAAAAAgAAAAAAAAACAAAAzAIAAAQAAADUAgAAAgAAAAAAAAADAAAABwAKABIA"
+ "AAADAAAABwAHABYAAAABAAAAAAAAAAEAAAAPAAAAAQAAABcACDxjbGluaXQ+AAY8aW5pdD4ACkdF"
+ "VF9TVEFUSUMAAUkAA0lJSQABTAAETExMTAAzTGNvbS9hbmRyb2lkL2phY2svYW5ub3RhdGlvbnMv"
+ "Q2FsbGVkQnlJbnZva2VDdXN0b207ADBMY29tL2FuZHJvaWQvamFjay9hbm5vdGF0aW9ucy9MaW5r"
+ "ZXJGaWVsZEhhbmRsZTsAL0xjb20vYW5kcm9pZC9qYWNrL2Fubm90YXRpb25zL01ldGhvZEhhbmRs"
+ "ZUtpbmQ7ADNMY29tL2FuZHJvaWQvamFjay9qYXZhNy9pbnZva2VjdXN0b20vdGVzdDAwMi9UZXN0"
+ "czsAGkxkYWx2aWsvYW5ub3RhdGlvbi9UaHJvd3M7ABVMamF2YS9pby9QcmludFN0cmVhbTsAEUxq"
+ "YXZhL2xhbmcvQ2xhc3M7ABNMamF2YS9sYW5nL0ludGVnZXI7ABJMamF2YS9sYW5nL09iamVjdDsA"
+ "EkxqYXZhL2xhbmcvU3RyaW5nOwASTGphdmEvbGFuZy9TeXN0ZW07ABVMamF2YS9sYW5nL1Rocm93"
+ "YWJsZTsAG0xqYXZhL2xhbmcvaW52b2tlL0NhbGxTaXRlOwAjTGphdmEvbGFuZy9pbnZva2UvQ29u"
+ "c3RhbnRDYWxsU2l0ZTsAH0xqYXZhL2xhbmcvaW52b2tlL01ldGhvZEhhbmRsZTsAJ0xqYXZhL2xh"
+ "bmcvaW52b2tlL01ldGhvZEhhbmRsZXMkTG9va3VwOwAgTGphdmEvbGFuZy9pbnZva2UvTWV0aG9k"
+ "SGFuZGxlczsAHUxqYXZhL2xhbmcvaW52b2tlL01ldGhvZFR5cGU7ABhManVuaXQvZnJhbWV3b3Jr"
+ "L0Fzc2VydDsAEExvcmcvanVuaXQvVGVzdDsABFRZUEUAClRlc3RzLmphdmEAAVYAAlZJAANWSUkA"
+ "AlZMABJbTGphdmEvbGFuZy9DbGFzczsAE1tMamF2YS9sYW5nL1N0cmluZzsAA2FkZAANYXJndW1l"
+ "bnRUeXBlcwAMYXNzZXJ0RXF1YWxzABVlbWl0dGVyOiBqYWNrLTQuMC1lbmcADWVuY2xvc2luZ1R5"
+ "cGUADWZpZWxkQ2FsbFNpdGUAEWZpZWxkTWV0aG9kSGFuZGxlAApmaW5kU3RhdGljAARraW5kAAZs"
+ "b29rdXAABG1haW4ACm1ldGhvZFR5cGUABG5hbWUAA291dAAPcHJpbnRTdGFja1RyYWNlAAdwcmlu"
+ "dGxuAApyZXR1cm5UeXBlAAR0ZXN0AAV2YWx1ZQAoAAcOAR0PAnh3Jh4AIQAHDgA2AgAABw4APwEA"
+ "Bw60ADsABw6lAAABBCQcAhgAGAApHAEdAgMnGAQrGwAvFygvFyMzGAACBQE1HAEYDAEUAAMWABcj"
+ "FQABAAQBAQkAiIAE4AUBgYAE0AYBCugGAQmABwQBqAcAAAATAAAAAAAAAAEAAAAAAAAAAQAAADYA"
+ "AABwAAAAAgAAABgAAABIAQAAAwAAAAkAAACoAQAABAAAAAQAAAAUAgAABQAAAA0AAAA0AgAABwAA"
+ "AAIAAACcAgAABgAAAAEAAACkAgAACAAAAAEAAADEAgAAAxAAAAIAAADMAgAAASAAAAUAAADgAgAA"
+ "BiAAAAEAAADQAwAAARAAAAYAAADwAwAAAiAAADYAAAAmBAAAAyAAAAUAAADoBwAABCAAAAMAAAAO"
+ "CAAABSAAAAEAAAA4CAAAACAAAAEAAAA/CAAAABAAAAEAAABgCAAA",
+ // https://cs.corp.google.com/android/toolchain/jack/jack-tests/tests/com/android/jack/java7/invokecustom/test003/Tests.java
+ "ZGV4CjAzOABjnhkFatj30/7cHTCJsfr7vAjz9/p+Y+TcCAAAcAAAAHhWNBIAAAAAAAAAAPQHAAAx"
+ "AAAAcAAAABYAAAA0AQAACQAAAIwBAAADAAAA+AEAAAsAAAAQAgAAAQAAAHACAABEBgAAmAIAAOoD"
+ "AADyAwAA9QMAAP4DAAANBAAAEAQAABYEAABLBAAAfgQAAK8EAADkBAAAAAUAABcFAAAqBQAAPgUA"
+ "AFIFAABmBQAAfQUAAJoFAAC/BQAA4AUAAAkGAAArBgAASgYAAGQGAAB2BgAAggYAAIUGAACJBgAA"
+ "jgYAAJIGAACnBgAArAYAALsGAADJBgAA4AYAAO8GAAD+BgAACgcAAB4HAAAkBwAAMgcAADoHAABA"
+ "BwAARgcAAEsHAABUBwAAYAcAAGYHAAABAAAABgAAAAcAAAAIAAAACQAAAAoAAAALAAAADAAAAA0A"
+ "AAAOAAAADwAAABAAAAARAAAAEgAAABMAAAAUAAAAFQAAABYAAAAXAAAAGAAAABoAAAAeAAAAAgAA"
+ "AAAAAACkAwAABQAAAAwAAAC0AwAABQAAAA4AAADAAwAABAAAAA8AAAAAAAAAGgAAABQAAAAAAAAA"
+ "GwAAABQAAADMAwAAHAAAABQAAADUAwAAHQAAABQAAADcAwAAHQAAABQAAADkAwAAAwADAAMAAAAE"
+ "AAwAJAAAAAoABgAsAAAABAAEAAAAAAAEAAAAHwAAAAQAAQAoAAAABAAIACoAAAAEAAQALwAAAAYA"
+ "BQAtAAAACAAEAAAAAAANAAcAAAAAAA8AAgAlAAAAEAADACkAAAASAAYAIQAAAM4HAADOBwAABAAA"
+ "AAEAAAAIAAAAAAAAABkAAAB8AwAA1QcAAAAAAAAEAAAAAgAAAAEAAACTBwAAAQAAAMMHAAACAAAA"
+ "wwcAAMsHAAABAAEAAQAAAG0HAAAEAAAAcBAGAAAADgAHAAYAAAAAAHIHAAAHAAAAkAABArAwsECw"
+ "ULBgDwAAAAUAAwAEAAAAfQcAABAAAABxAAkAAAAMABwBBABuQAgAEEMMACIBDQBwIAcAAQARAQgA"
+ "AQACAAAAhgcAABAAAABiBgIAEhASIRIyEkMSVBJl/QYAAAAACgBuIAUABgAOAAcAAQACAAAAjQcA"
+ "ABAAAAASEBIhEjISQxJUEmX9BgEAAAAKABMBFQBxIAoAAQAOAAAAAAAAAAAAAwAAAAAAAAABAAAA"
+ "mAIAAAIAAACgAgAABAAAAKgCAAAGAAAAAAAAAAAAAAAAAAAAAwAAAA8ACQARAAAAAwAAAAcACQAR"
+ "AAAAAQAAAAAAAAACAAAAAAAAAAEAAAAOAAAAAQAAABUABjxpbml0PgABSQAHSUlJSUlJSQANSU5W"
+ "T0tFX1NUQVRJQwABTAAETExMTAAzTGNvbS9hbmRyb2lkL2phY2svYW5ub3RhdGlvbnMvQ2FsbGVk"
+ "QnlJbnZva2VDdXN0b207ADFMY29tL2FuZHJvaWQvamFjay9hbm5vdGF0aW9ucy9MaW5rZXJNZXRo"
+ "b2RIYW5kbGU7AC9MY29tL2FuZHJvaWQvamFjay9hbm5vdGF0aW9ucy9NZXRob2RIYW5kbGVLaW5k"
+ "OwAzTGNvbS9hbmRyb2lkL2phY2svamF2YTcvaW52b2tlY3VzdG9tL3Rlc3QwMDMvVGVzdHM7ABpM"
+ "ZGFsdmlrL2Fubm90YXRpb24vVGhyb3dzOwAVTGphdmEvaW8vUHJpbnRTdHJlYW07ABFMamF2YS9s"
+ "YW5nL0NsYXNzOwASTGphdmEvbGFuZy9PYmplY3Q7ABJMamF2YS9sYW5nL1N0cmluZzsAEkxqYXZh"
+ "L2xhbmcvU3lzdGVtOwAVTGphdmEvbGFuZy9UaHJvd2FibGU7ABtMamF2YS9sYW5nL2ludm9rZS9D"
+ "YWxsU2l0ZTsAI0xqYXZhL2xhbmcvaW52b2tlL0NvbnN0YW50Q2FsbFNpdGU7AB9MamF2YS9sYW5n"
+ "L2ludm9rZS9NZXRob2RIYW5kbGU7ACdMamF2YS9sYW5nL2ludm9rZS9NZXRob2RIYW5kbGVzJExv"
+ "b2t1cDsAIExqYXZhL2xhbmcvaW52b2tlL01ldGhvZEhhbmRsZXM7AB1MamF2YS9sYW5nL2ludm9r"
+ "ZS9NZXRob2RUeXBlOwAYTGp1bml0L2ZyYW1ld29yay9Bc3NlcnQ7ABBMb3JnL2p1bml0L1Rlc3Q7"
+ "AApUZXN0cy5qYXZhAAFWAAJWSQADVklJAAJWTAATW0xqYXZhL2xhbmcvU3RyaW5nOwADYWRkAA1h"
+ "cmd1bWVudFR5cGVzAAxhc3NlcnRFcXVhbHMAFWVtaXR0ZXI6IGphY2stNC4wLWVuZwANZW5jbG9z"
+ "aW5nVHlwZQANZmllbGRDYWxsU2l0ZQAKZmluZFN0YXRpYwASaW52b2tlTWV0aG9kSGFuZGxlAARr"
+ "aW5kAAxsaW5rZXJNZXRob2QABmxvb2t1cAAEbWFpbgAEbmFtZQADb3V0AAdwcmludGxuAApyZXR1"
+ "cm5UeXBlAAR0ZXN0AAV2YWx1ZQAiAAcOAC8GAAAAAAAABw4ANQMAAAAHDqUAPwEABw7wADsABw7w"
+ "AAABBCAcBhgAGAAYABgAGAAYACYcAR0CBCAcAxgPGAkYESMYBCcbACsXKCsXHy4YAAIFATAcARgL"
+ "ARMAAxYAFx8VAAEABAEBCQCBgAS0BQEKzAUBCuwFAQmcBgQBzAYAAAATAAAAAAAAAAEAAAAAAAAA"
+ "AQAAADEAAABwAAAAAgAAABYAAAA0AQAAAwAAAAkAAACMAQAABAAAAAMAAAD4AQAABQAAAAsAAAAQ"
+ "AgAABwAAAAIAAABoAgAABgAAAAEAAABwAgAACAAAAAEAAACQAgAAAxAAAAMAAACYAgAAASAAAAUA"
+ "AAC0AgAABiAAAAEAAAB8AwAAARAAAAcAAACkAwAAAiAAADEAAADqAwAAAyAAAAUAAABtBwAABCAA"
+ "AAMAAACTBwAABSAAAAEAAADOBwAAACAAAAEAAADVBwAAABAAAAEAAAD0BwAA",
+ // https://cs.corp.google.com/android/toolchain/jack/jack-tests/tests/com/android/jack/java7/invokecustom/test004/Tests.java
+ "ZGV4CjAzOABvUVfbV74qWbSOEsgKP+EzahlNQLW2/8TMDAAAcAAAAHhWNBIAAAAAAAAAAOQLAABS"
+ "AAAAcAAAAB8AAAC4AQAAEAAAADQCAAADAAAA9AIAABIAAAAMAwAAAQAAAKQDAAAACQAAzAMAANYF"
+ "AADZBQAA4QUAAOkFAADsBQAA7wUAAPIFAAD1BQAA/AUAAP8FAAAEBgAAEwYAABYGAAAZBgAAHwYA"
+ "AC8GAABkBgAAjQYAAMAGAADxBgAAJgcAAEUHAABhBwAAeAcAAIoHAACdBwAAsQcAAMUHAADZBwAA"
+ "8AcAAA0IAAAyCAAAUwgAAHwIAACeCAAAvQgAANcIAADpCAAA7AgAAPgIAAD7CAAAAAkAAAYJAAAM"
+ "CQAAEAkAABUJAAAaCQAAHgkAACMJAAAnCQAAKgkAADMJAABICQAATQkAAFwJAABqCQAAdgkAAIQJ"
+ "AACPCQAAmgkAAKYJAACzCQAAygkAANkJAADoCQAA9AkAAAAKAAAKCgAAHgoAACQKAAAyCgAAPQoA"
+ "AEUKAABLCgAAYgoAAGgKAABtCgAAdgoAAIIKAACOCgAAmwoAAKEKAAADAAAABAAAAAUAAAAGAAAA"
+ "CAAAAAsAAAAPAAAAEAAAABEAAAASAAAAEwAAABQAAAAVAAAAFgAAABgAAAAZAAAAGgAAABsAAAAc"
+ "AAAAHQAAAB4AAAAfAAAAIAAAACEAAAAiAAAAIwAAACQAAAAlAAAAJwAAADEAAAAzAAAACQAAAAQA"
+ "AABMBQAADgAAABMAAABUBQAADQAAABUAAAB0BQAADAAAABYAAAAAAAAAJwAAABwAAAAAAAAAKAAA"
+ "ABwAAACABQAAKQAAABwAAACIBQAAKgAAABwAAACUBQAAKwAAABwAAACgBQAALAAAABwAAABMBQAA"
+ "LQAAABwAAACoBQAALwAAABwAAACwBQAALwAAABwAAAC4BQAALgAAABwAAADABQAAMAAAABwAAADI"
+ "BQAALgAAABwAAADQBQAACQAJAAoAAAAKABMAPwAAABEADQBLAAAACgAEAAIAAAAKAAAANAAAAAoA"
+ "AQBFAAAACgAPAEgAAAAKAAQAUAAAAA0ACABMAAAADwAEAAIAAAAUAA0AAgAAABYAAgBAAAAAFwAD"
+ "AEcAAAAZAAUANgAAABkABgA2AAAAGQAHADYAAAAZAAkANgAAABkACgA2AAAAGQALADYAAAAZAAwA"
+ "NgAAABkADgA3AAAAnQsAAJ0LAAAKAAAAAQAAAA8AAAAAAAAAJgAAACQFAADGCwAAAAAAAAQAAAAC"
+ "AAAAAQAAAN4KAAACAAAAegsAAJILAAACAAAAkgsAAJoLAAABAAEAAQAAAKgKAAAEAAAAcBAGAAAA"
+ "DgADAAIAAAAAAK0KAAADAAAAkAABAg8AAAAYAA8ABgAAALQKAABTAAAAcRARAAwAEhJxIA0A0gAT"
+ "AmEAcSAKAOIAEwIABHEgDQDyABISAgAQAHEgDQACABICFAOamTFBAgARAHEwDAADAhYGAAAYApqZ"
+ "mZmZmQFABQQSAHcGCwACABsCBwAAAAgAFABxIBAAAgAcAgoACAAVAHEgDwACABcCFc1bBwUAFgBx"
+ "QA4AMhBxAAkAAAAMAhwDCgBuQAgAMroMAiIDFABwIAcAIwARAwAABAABAAIAAADRCgAADAAAAGIA"
+ "AgASIRIy/CAAACEACgFuIAUAEAAOAAMAAQACAAAA2AoAAAsAAAASIBIx/CABABAACgASUXEgDQAB"
+ "AA4AAAAAAAAAAAAAAAMAAAAAAAAAAQAAAMwDAAACAAAA1AMAAAQAAADgAwAAAgAAAAQABAANAAAA"
+ "FgAQABgAHQAAAAEAGwAEAAMAAgAQAA4ABQAAAAMAAAAOABAAGAAAAAIAAAABAAEAAwAAAAIAAgAC"
+ "AAAAAwAAAAMAAwADAAAAAQAAAAQAAAACAAAABQAFAAIAAAAPAA8AAgAAABAAEAABAAAAFQAAAAEA"
+ "AAAdAAAAAQAAAB4AASgABjwqPjtKKQAGPGluaXQ+AAFCAAFDAAFEAAFGAAVIZWxsbwABSQADSUlJ"
+ "AA1JTlZPS0VfU1RBVElDAAFKAAFMAARMTExMAA5MTExMWkJDU0lGRExMSgAzTGNvbS9hbmRyb2lk"
+ "L2phY2svYW5ub3RhdGlvbnMvQ2FsbGVkQnlJbnZva2VDdXN0b207ACdMY29tL2FuZHJvaWQvamFj"
+ "ay9hbm5vdGF0aW9ucy9Db25zdGFudDsAMUxjb20vYW5kcm9pZC9qYWNrL2Fubm90YXRpb25zL0xp"
+ "bmtlck1ldGhvZEhhbmRsZTsAL0xjb20vYW5kcm9pZC9qYWNrL2Fubm90YXRpb25zL01ldGhvZEhh"
+ "bmRsZUtpbmQ7ADNMY29tL2FuZHJvaWQvamFjay9qYXZhNy9pbnZva2VjdXN0b20vdGVzdDAwNC9U"
+ "ZXN0czsAHUxkYWx2aWsvYW5ub3RhdGlvbi9TaWduYXR1cmU7ABpMZGFsdmlrL2Fubm90YXRpb24v"
+ "VGhyb3dzOwAVTGphdmEvaW8vUHJpbnRTdHJlYW07ABBMamF2YS9sYW5nL0NsYXNzABFMamF2YS9s"
+ "YW5nL0NsYXNzOwASTGphdmEvbGFuZy9PYmplY3Q7ABJMamF2YS9sYW5nL1N0cmluZzsAEkxqYXZh"
+ "L2xhbmcvU3lzdGVtOwAVTGphdmEvbGFuZy9UaHJvd2FibGU7ABtMamF2YS9sYW5nL2ludm9rZS9D"
+ "YWxsU2l0ZTsAI0xqYXZhL2xhbmcvaW52b2tlL0NvbnN0YW50Q2FsbFNpdGU7AB9MamF2YS9sYW5n"
+ "L2ludm9rZS9NZXRob2RIYW5kbGU7ACdMamF2YS9sYW5nL2ludm9rZS9NZXRob2RIYW5kbGVzJExv"
+ "b2t1cDsAIExqYXZhL2xhbmcvaW52b2tlL01ldGhvZEhhbmRsZXM7AB1MamF2YS9sYW5nL2ludm9r"
+ "ZS9NZXRob2RUeXBlOwAYTGp1bml0L2ZyYW1ld29yay9Bc3NlcnQ7ABBMb3JnL2p1bml0L1Rlc3Q7"
+ "AAFTAApUZXN0cy5qYXZhAAFWAANWQ0MABFZEREQABFZGRkYAAlZJAANWSUkAA1ZKSgACVkwAA1ZM"
+ "TAACVloAAVoAB1pCQ1NJRkQAE1tMamF2YS9sYW5nL1N0cmluZzsAA2FkZAANYXJndW1lbnRUeXBl"
+ "cwAMYXNzZXJ0RXF1YWxzAAphc3NlcnRUcnVlAAxib29sZWFuVmFsdWUACWJ5dGVWYWx1ZQAJY2hh"
+ "clZhbHVlAApjbGFzc1ZhbHVlAAtkb3VibGVWYWx1ZQAVZW1pdHRlcjogamFjay00LjAtZW5nAA1l"
+ "bmNsb3NpbmdUeXBlAA1maWVsZENhbGxTaXRlAApmaW5kU3RhdGljAApmbG9hdFZhbHVlAAhpbnRW"
+ "YWx1ZQASaW52b2tlTWV0aG9kSGFuZGxlAARraW5kAAxsaW5rZXJNZXRob2QACWxvbmdWYWx1ZQAG"
+ "bG9va3VwAARtYWluABVtZXRob2RIYW5kbGVFeHRyYUFyZ3MABG5hbWUAA291dAAHcHJpbnRsbgAK"
+ "cmV0dXJuVHlwZQAKc2hvcnRWYWx1ZQALc3RyaW5nVmFsdWUABHRlc3QABXZhbHVlACMABw4ANwIA"
+ "AAcOAD4NAAAAAAAAAAAAAAAAAAcOPEtaWmmWw4d4h6UAUgEABw60AE4ABw6lAAAGBTUcAhgEGARD"
+ "HAEdCAQ1HA0YFhgQGBgYHRgAGAEYGxgEGAMYAhgQGA4YBT4YCkQbAEoXRUkcCh0HATgcAT8dBwE5"
+ "HAEAAR0HATocAQNhHQcBThwBIgAEHQcBQhwBBAEdBwFBHAFwmpkxQR0HATwcAfGamZmZmZkBQB0H"
+ "AU8cARcHHQcBOxwBGAodBwFGHAFmFc1bB0oXNE0YBAILAVEcCRcAFyAXGhciFzIXGhcXFwEXHQIM"
+ "AVEcARgSARoADRYAFzQVAAQBBAEEYSQABAQBcJqZMUHxmpmZmZmZAUAXBxgKZhXNWwcBAAQBAQkA"
+ "gYAE7AcBCoQIAQqcCAEJ1AkEAfwJAAATAAAAAAAAAAEAAAAAAAAAAQAAAFIAAABwAAAAAgAAAB8A"
+ "AAC4AQAAAwAAABAAAAA0AgAABAAAAAMAAAD0AgAABQAAABIAAAAMAwAABwAAAAIAAACcAwAABgAA"
+ "AAEAAACkAwAACAAAAAEAAADEAwAAAxAAAAMAAADMAwAAASAAAAUAAADsAwAABiAAAAEAAAAkBQAA"
+ "ARAAAA0AAABMBQAAAiAAAFIAAADWBQAAAyAAAAUAAACoCgAABCAAAAQAAADeCgAABSAAAAEAAACd"
+ "CwAAACAAAAEAAADGCwAAABAAAAEAAADkCwAA"
+};
+
+TEST_F(DexFileVerifierTest, InvokeCustomDexSamples) {
+ for (size_t i = 0; i < arraysize(kInvokeCustomDexFiles); ++i) {
+ size_t length;
+ std::unique_ptr<uint8_t[]> dex_bytes(DecodeBase64(kInvokeCustomDexFiles[i], &length));
+ CHECK(dex_bytes != nullptr);
+ // Note: `dex_file` will be destroyed before `dex_bytes`.
+ std::unique_ptr<DexFile> dex_file(GetDexFile(dex_bytes.get(), length));
+ std::string error_msg;
+ EXPECT_TRUE(DexFileVerifier::Verify(dex_file.get(),
+ dex_file->Begin(),
+ dex_file->Size(),
+ "good checksum, verify",
+ /*verify_checksum*/ true,
+ &error_msg));
+ // TODO(oth): Test corruptions (b/35308502)
+ }
+}
+
} // namespace art
diff --git a/runtime/dex_instruction.cc b/runtime/dex_instruction.cc
index 37f3ac92e9..091085a85c 100644
--- a/runtime/dex_instruction.cc
+++ b/runtime/dex_instruction.cc
@@ -407,6 +407,20 @@ std::string Instruction::DumpString(const DexFile* file) const {
break;
}
FALLTHROUGH_INTENDED;
+ case INVOKE_CUSTOM:
+ if (file != nullptr) {
+ os << opcode << " {";
+ uint32_t call_site_idx = VRegB_35c();
+ for (size_t i = 0; i < VRegA_35c(); ++i) {
+ if (i != 0) {
+ os << ", ";
+ }
+ os << "v" << arg[i];
+ }
+ os << "}, // call_site@" << call_site_idx;
+ break;
+ }
+ FALLTHROUGH_INTENDED;
default:
os << opcode << " {v" << arg[0] << ", v" << arg[1] << ", v" << arg[2]
<< ", v" << arg[3] << ", v" << arg[4] << "}, thing@" << VRegB_35c();
@@ -415,6 +429,8 @@ std::string Instruction::DumpString(const DexFile* file) const {
break;
}
case k3rc: {
+ uint16_t first_reg = VRegC_3rc();
+ uint16_t last_reg = VRegC_3rc() + VRegA_3rc() - 1;
switch (Opcode()) {
case INVOKE_VIRTUAL_RANGE:
case INVOKE_SUPER_RANGE:
@@ -423,7 +439,7 @@ std::string Instruction::DumpString(const DexFile* file) const {
case INVOKE_INTERFACE_RANGE:
if (file != nullptr) {
uint32_t method_idx = VRegB_3rc();
- os << StringPrintf("%s, {v%d .. v%d}, ", opcode, VRegC_3rc(), (VRegC_3rc() + VRegA_3rc() - 1))
+ os << StringPrintf("%s, {v%d .. v%d}, ", opcode, first_reg, last_reg)
<< file->PrettyMethod(method_idx) << " // method@" << method_idx;
break;
}
@@ -431,14 +447,22 @@ std::string Instruction::DumpString(const DexFile* file) const {
case INVOKE_VIRTUAL_RANGE_QUICK:
if (file != nullptr) {
uint32_t method_idx = VRegB_3rc();
- os << StringPrintf("%s, {v%d .. v%d}, ", opcode, VRegC_3rc(), (VRegC_3rc() + VRegA_3rc() - 1))
+ os << StringPrintf("%s, {v%d .. v%d}, ", opcode, first_reg, last_reg)
<< "// vtable@" << method_idx;
break;
}
FALLTHROUGH_INTENDED;
+ case INVOKE_CUSTOM_RANGE:
+ if (file != nullptr) {
+ uint32_t call_site_idx = VRegB_3rc();
+ os << StringPrintf("%s, {v%d .. v%d}, ", opcode, first_reg, last_reg)
+ << "// call_site@" << call_site_idx;
+ break;
+ }
+ FALLTHROUGH_INTENDED;
default:
- os << StringPrintf("%s, {v%d .. v%d}, thing@%d", opcode, VRegC_3rc(),
- (VRegC_3rc() + VRegA_3rc() - 1), VRegB_3rc());
+ os << StringPrintf("%s, {v%d .. v%d}, ", opcode, first_reg, last_reg)
+ << "thing@" << VRegB_3rc();
break;
}
break;
diff --git a/runtime/dex_instruction.h b/runtime/dex_instruction.h
index 578550cae2..d269110570 100644
--- a/runtime/dex_instruction.h
+++ b/runtime/dex_instruction.h
@@ -126,14 +126,15 @@ class Instruction {
enum IndexType {
kIndexUnknown = 0,
- kIndexNone, // has no index
- kIndexTypeRef, // type reference index
- kIndexStringRef, // string reference index
- kIndexMethodRef, // method reference index
- kIndexFieldRef, // field reference index
- kIndexFieldOffset, // field offset (for static linked fields)
- kIndexVtableOffset, // vtable offset (for static linked methods)
- kIndexMethodAndProtoRef // method and a proto reference index (for invoke-polymorphic)
+ kIndexNone, // has no index
+ kIndexTypeRef, // type reference index
+ kIndexStringRef, // string reference index
+ kIndexMethodRef, // method reference index
+ kIndexFieldRef, // field reference index
+ kIndexFieldOffset, // field offset (for static linked fields)
+ kIndexVtableOffset, // vtable offset (for static linked methods)
+ kIndexMethodAndProtoRef, // method and a proto reference index (for invoke-polymorphic)
+ kIndexCallSiteRef, // call site reference index
};
enum Flags {
@@ -165,31 +166,32 @@ class Instruction {
};
enum VerifyFlag {
- kVerifyNone = 0x000000,
- kVerifyRegA = 0x000001,
- kVerifyRegAWide = 0x000002,
- kVerifyRegB = 0x000004,
- kVerifyRegBField = 0x000008,
- kVerifyRegBMethod = 0x000010,
- kVerifyRegBNewInstance = 0x000020,
- kVerifyRegBString = 0x000040,
- kVerifyRegBType = 0x000080,
- kVerifyRegBWide = 0x000100,
- kVerifyRegC = 0x000200,
- kVerifyRegCField = 0x000400,
- kVerifyRegCNewArray = 0x000800,
- kVerifyRegCType = 0x001000,
- kVerifyRegCWide = 0x002000,
- kVerifyArrayData = 0x004000,
- kVerifyBranchTarget = 0x008000,
- kVerifySwitchTargets = 0x010000,
- kVerifyVarArg = 0x020000,
- kVerifyVarArgNonZero = 0x040000,
- kVerifyVarArgRange = 0x080000,
- kVerifyVarArgRangeNonZero = 0x100000,
- kVerifyRuntimeOnly = 0x200000,
- kVerifyError = 0x400000,
- kVerifyRegHPrototype = 0x800000
+ kVerifyNone = 0x0000000,
+ kVerifyRegA = 0x0000001,
+ kVerifyRegAWide = 0x0000002,
+ kVerifyRegB = 0x0000004,
+ kVerifyRegBField = 0x0000008,
+ kVerifyRegBMethod = 0x0000010,
+ kVerifyRegBNewInstance = 0x0000020,
+ kVerifyRegBString = 0x0000040,
+ kVerifyRegBType = 0x0000080,
+ kVerifyRegBWide = 0x0000100,
+ kVerifyRegC = 0x0000200,
+ kVerifyRegCField = 0x0000400,
+ kVerifyRegCNewArray = 0x0000800,
+ kVerifyRegCType = 0x0001000,
+ kVerifyRegCWide = 0x0002000,
+ kVerifyArrayData = 0x0004000,
+ kVerifyBranchTarget = 0x0008000,
+ kVerifySwitchTargets = 0x0010000,
+ kVerifyVarArg = 0x0020000,
+ kVerifyVarArgNonZero = 0x0040000,
+ kVerifyVarArgRange = 0x0080000,
+ kVerifyVarArgRangeNonZero = 0x0100000,
+ kVerifyRuntimeOnly = 0x0200000,
+ kVerifyError = 0x0400000,
+ kVerifyRegHPrototype = 0x0800000,
+ kVerifyRegBCallSite = 0x1000000
};
static constexpr uint32_t kMaxVarArgRegs = 5;
diff --git a/runtime/dex_instruction_list.h b/runtime/dex_instruction_list.h
index ca2ce1d990..a5ce3c2f8a 100644
--- a/runtime/dex_instruction_list.h
+++ b/runtime/dex_instruction_list.h
@@ -271,8 +271,8 @@
V(0xF9, UNUSED_F9, "unused-f9", k10x, kIndexUnknown, 0, kVerifyError) \
V(0xFA, INVOKE_POLYMORPHIC, "invoke-polymorphic", k45cc, kIndexMethodAndProtoRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero | kVerifyRegHPrototype) \
V(0xFB, INVOKE_POLYMORPHIC_RANGE, "invoke-polymorphic/range", k4rcc, kIndexMethodAndProtoRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero | kVerifyRegHPrototype) \
- V(0xFC, UNUSED_FC, "unused-fc", k10x, kIndexUnknown, 0, kVerifyError) \
- V(0xFD, UNUSED_FD, "unused-fd", k10x, kIndexUnknown, 0, kVerifyError) \
+ V(0xFC, INVOKE_CUSTOM, "invoke-custom", k35c, kIndexCallSiteRef, kContinue | kThrow, kVerifyRegBCallSite) \
+ V(0xFD, INVOKE_CUSTOM_RANGE, "invoke-custom/range", k3rc, kIndexCallSiteRef, kContinue | kThrow, kVerifyRegBCallSite) \
V(0xFE, UNUSED_FE, "unused-fe", k10x, kIndexUnknown, 0, kVerifyError) \
V(0xFF, UNUSED_FF, "unused-ff", k10x, kIndexUnknown, 0, kVerifyError)
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index fb8139b7c6..6301362e09 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -39,7 +39,7 @@
namespace art {
void CheckReferenceResult(Handle<mirror::Object> o, Thread* self) {
- if (o.Get() == nullptr) {
+ if (o == nullptr) {
return;
}
// Make sure that the result is an instance of the type this method was expected to return.
diff --git a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
index 5b1b2871c2..699cf91c70 100644
--- a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
@@ -53,13 +53,18 @@ static inline void BssWriteBarrier(ArtMethod* outer_method) REQUIRES_SHARED(Lock
}
}
+constexpr Runtime::CalleeSaveType kInitEntrypointSaveType =
+ // TODO: Change allocation entrypoints on MIPS and MIPS64 to kSaveEverything.
+ (kRuntimeISA == kMips || kRuntimeISA == kMips64) ? Runtime::kSaveRefsOnly
+ : Runtime::kSaveEverything;
+
extern "C" mirror::Class* artInitializeStaticStorageFromCode(uint32_t type_idx, Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) {
// Called to ensure static storage base is initialized for direct static field reads and writes.
// A class may be accessing another class' fields when it doesn't have access, as access has been
// given by inheritance.
ScopedQuickEntrypointChecks sqec(self);
- auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, Runtime::kSaveRefsOnly);
+ auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
ArtMethod* caller = caller_and_outer.caller;
mirror::Class* result =
ResolveVerifyAndClinit(dex::TypeIndex(type_idx), caller, self, true, false);
@@ -73,7 +78,7 @@ extern "C" mirror::Class* artInitializeTypeFromCode(uint32_t type_idx, Thread* s
REQUIRES_SHARED(Locks::mutator_lock_) {
// Called when method->dex_cache_resolved_types_[] misses.
ScopedQuickEntrypointChecks sqec(self);
- auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, Runtime::kSaveRefsOnly);
+ auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
ArtMethod* caller = caller_and_outer.caller;
mirror::Class* result =
ResolveVerifyAndClinit(dex::TypeIndex(type_idx), caller, self, false, false);
@@ -88,7 +93,7 @@ extern "C" mirror::Class* artInitializeTypeAndVerifyAccessFromCode(uint32_t type
// Called when caller isn't guaranteed to have access to a type and the dex cache may be
// unpopulated.
ScopedQuickEntrypointChecks sqec(self);
- auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, Runtime::kSaveRefsOnly);
+ auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
ArtMethod* caller = caller_and_outer.caller;
mirror::Class* result =
ResolveVerifyAndClinit(dex::TypeIndex(type_idx), caller, self, false, true);
@@ -101,11 +106,7 @@ extern "C" mirror::Class* artInitializeTypeAndVerifyAccessFromCode(uint32_t type
extern "C" mirror::String* artResolveStringFromCode(int32_t string_idx, Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) {
ScopedQuickEntrypointChecks sqec(self);
- auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(
- self,
- // TODO: Change art_quick_resolve_string on MIPS and MIPS64 to kSaveEverything.
- (kRuntimeISA == kMips || kRuntimeISA == kMips64) ? Runtime::kSaveRefsOnly
- : Runtime::kSaveEverything);
+ auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
ArtMethod* caller = caller_and_outer.caller;
mirror::String* result = ResolveStringFromCode(caller, dex::StringIndex(string_idx));
if (LIKELY(result != nullptr)) {
diff --git a/runtime/entrypoints/quick/quick_field_entrypoints.cc b/runtime/entrypoints/quick/quick_field_entrypoints.cc
index 4544aef148..822c5a8d9d 100644
--- a/runtime/entrypoints/quick/quick_field_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_field_entrypoints.cc
@@ -48,7 +48,7 @@ ALWAYS_INLINE static inline ArtField* FindInstanceField(uint32_t field_idx,
StackHandleScope<1> hs(self);
HandleWrapper<mirror::Object> h(hs.NewHandleWrapper(obj));
ArtField* field = FindFieldFromCode<type, kAccessCheck>(field_idx, referrer, self, size);
- if (LIKELY(field != nullptr) && UNLIKELY(h.Get() == nullptr)) {
+ if (LIKELY(field != nullptr) && UNLIKELY(h == nullptr)) {
ThrowNullPointerExceptionForFieldAccess(field, /*is_read*/FindFieldTypeIsRead(type));
return nullptr;
}
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 3ef47c427e..c2bca5305d 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -2435,8 +2435,8 @@ extern "C" uintptr_t artInvokePolymorphic(
// Wrap raw_method_handle in a Handle for safety.
StackHandleScope<5> hs(self);
- Handle<mirror::MethodHandleImpl> method_handle(
- hs.NewHandle(ObjPtr<mirror::MethodHandleImpl>::DownCast(MakeObjPtr(raw_method_handle))));
+ Handle<mirror::MethodHandle> method_handle(
+ hs.NewHandle(ObjPtr<mirror::MethodHandle>::DownCast(MakeObjPtr(raw_method_handle))));
raw_method_handle = nullptr;
self->EndAssertNoThreadSuspension(old_cause);
@@ -2497,15 +2497,14 @@ extern "C" uintptr_t artInvokePolymorphic(
// consecutive order.
uint32_t unused_args[Instruction::kMaxVarArgRegs] = {};
uint32_t first_callee_arg = first_arg + 1;
- const bool do_assignability_check = false;
- if (!DoInvokePolymorphic<true /* is_range */, do_assignability_check>(self,
- resolved_method,
- *shadow_frame,
- method_handle,
- method_type,
- unused_args,
- first_callee_arg,
- result)) {
+ if (!DoInvokePolymorphic<true /* is_range */>(self,
+ resolved_method,
+ *shadow_frame,
+ method_handle,
+ method_type,
+ unused_args,
+ first_callee_arg,
+ result)) {
DCHECK(self->IsExceptionPending());
}
diff --git a/runtime/gc/allocation_record.cc b/runtime/gc/allocation_record.cc
index e18a955251..122f7799df 100644
--- a/runtime/gc/allocation_record.cc
+++ b/runtime/gc/allocation_record.cc
@@ -292,7 +292,7 @@ void AllocRecordObjectMap::RecordAllocation(Thread* self,
(kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(Locks::alloc_tracker_lock_);
new_record_condition_.WaitHoldingLocks(self);
}
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index f12ad8058d..f18ffb4aef 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -835,65 +835,9 @@ void ConcurrentCopying::ProcessFalseGrayStack() {
void ConcurrentCopying::IssueEmptyCheckpoint() {
Thread* self = Thread::Current();
ThreadList* thread_list = Runtime::Current()->GetThreadList();
- Barrier* barrier = thread_list->EmptyCheckpointBarrier();
- barrier->Init(self, 0);
- std::vector<uint32_t> runnable_thread_ids; // Used in debug build only
- size_t barrier_count = thread_list->RunEmptyCheckpoint(runnable_thread_ids);
- // If there are no threads to wait which implys that all the checkpoint functions are finished,
- // then no need to release the mutator lock.
- if (barrier_count == 0) {
- return;
- }
// Release locks then wait for all mutator threads to pass the barrier.
Locks::mutator_lock_->SharedUnlock(self);
- {
- ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
- if (kIsDebugBuild) {
- static constexpr uint64_t kEmptyCheckpointTimeoutMs = 600 * 1000; // 10 minutes.
- bool timed_out = barrier->Increment(self, barrier_count, kEmptyCheckpointTimeoutMs);
- if (timed_out) {
- std::ostringstream ss;
- ss << "Empty checkpoint timeout\n";
- ss << "Barrier count " << barrier->GetCount(self) << "\n";
- ss << "Runnable thread IDs";
- for (uint32_t tid : runnable_thread_ids) {
- ss << " " << tid;
- }
- ss << "\n";
- Locks::mutator_lock_->Dump(ss);
- ss << "\n";
- LOG(FATAL_WITHOUT_ABORT) << ss.str();
- // Some threads in 'runnable_thread_ids' are probably stuck. Try to dump their stacks.
- // Avoid using ThreadList::Dump() initially because it is likely to get stuck as well.
- {
- ScopedObjectAccess soa(self);
- MutexLock mu1(self, *Locks::thread_list_lock_);
- for (Thread* thread : thread_list->GetList()) {
- uint32_t tid = thread->GetThreadId();
- bool is_in_runnable_thread_ids =
- std::find(runnable_thread_ids.begin(), runnable_thread_ids.end(), tid) !=
- runnable_thread_ids.end();
- if (is_in_runnable_thread_ids &&
- thread->ReadFlag(kEmptyCheckpointRequest)) {
- // Found a runnable thread that hasn't responded to the empty checkpoint request.
- // Assume it's stuck and safe to dump its stack.
- thread->Dump(LOG_STREAM(FATAL_WITHOUT_ABORT),
- /*dump_native_stack*/ true,
- /*backtrace_map*/ nullptr,
- /*force_dump_stack*/ true);
- }
- }
- }
- LOG(FATAL_WITHOUT_ABORT)
- << "Dumped runnable threads that haven't responded to empty checkpoint.";
- // Now use ThreadList::Dump() to dump more threads, noting it may get stuck.
- thread_list->Dump(LOG_STREAM(FATAL_WITHOUT_ABORT));
- LOG(FATAL) << "Dumped all threads.";
- }
- } else {
- barrier->Increment(self, barrier_count);
- }
- }
+ thread_list->RunEmptyCheckpoint();
Locks::mutator_lock_->SharedLock(self);
}
diff --git a/runtime/gc/reference_processor.cc b/runtime/gc/reference_processor.cc
index c1548365c7..86b152211c 100644
--- a/runtime/gc/reference_processor.cc
+++ b/runtime/gc/reference_processor.cc
@@ -104,7 +104,7 @@ ObjPtr<mirror::Object> ReferenceProcessor::GetReferent(Thread* self,
}
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(Locks::reference_processor_lock_);
condition_.WaitHoldingLocks(self);
}
return reference->GetReferent();
@@ -292,7 +292,7 @@ void ReferenceProcessor::WaitUntilDoneProcessingReferences(Thread* self) {
(kUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(Locks::reference_processor_lock_);
condition_.WaitHoldingLocks(self);
}
}
diff --git a/runtime/gc/reference_queue_test.cc b/runtime/gc/reference_queue_test.cc
index 3ca3353562..613b034f59 100644
--- a/runtime/gc/reference_queue_test.cc
+++ b/runtime/gc/reference_queue_test.cc
@@ -38,11 +38,11 @@ TEST_F(ReferenceQueueTest, EnqueueDequeue) {
auto ref_class = hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindClass(self, "Ljava/lang/ref/WeakReference;",
ScopedNullHandle<mirror::ClassLoader>()));
- ASSERT_TRUE(ref_class.Get() != nullptr);
+ ASSERT_TRUE(ref_class != nullptr);
auto ref1(hs.NewHandle(ref_class->AllocObject(self)->AsReference()));
- ASSERT_TRUE(ref1.Get() != nullptr);
+ ASSERT_TRUE(ref1 != nullptr);
auto ref2(hs.NewHandle(ref_class->AllocObject(self)->AsReference()));
- ASSERT_TRUE(ref2.Get() != nullptr);
+ ASSERT_TRUE(ref2 != nullptr);
queue.EnqueueReference(ref1.Get());
ASSERT_TRUE(!queue.IsEmpty());
ASSERT_EQ(queue.GetLength(), 1U);
@@ -73,15 +73,15 @@ TEST_F(ReferenceQueueTest, Dump) {
auto weak_ref_class = hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindClass(self, "Ljava/lang/ref/WeakReference;",
ScopedNullHandle<mirror::ClassLoader>()));
- ASSERT_TRUE(weak_ref_class.Get() != nullptr);
+ ASSERT_TRUE(weak_ref_class != nullptr);
auto finalizer_ref_class = hs.NewHandle(
Runtime::Current()->GetClassLinker()->FindClass(self, "Ljava/lang/ref/FinalizerReference;",
ScopedNullHandle<mirror::ClassLoader>()));
- ASSERT_TRUE(finalizer_ref_class.Get() != nullptr);
+ ASSERT_TRUE(finalizer_ref_class != nullptr);
auto ref1(hs.NewHandle(weak_ref_class->AllocObject(self)->AsReference()));
- ASSERT_TRUE(ref1.Get() != nullptr);
+ ASSERT_TRUE(ref1 != nullptr);
auto ref2(hs.NewHandle(finalizer_ref_class->AllocObject(self)->AsReference()));
- ASSERT_TRUE(ref2.Get() != nullptr);
+ ASSERT_TRUE(ref2 != nullptr);
queue.EnqueueReference(ref1.Get());
oss.str("");
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index ffbca525d9..2163a20e87 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -587,15 +587,18 @@ class ImageSpaceLoader {
}
std::unique_ptr<MemMap> map;
+
// GetImageBegin is the preferred address to map the image. If we manage to map the
// image at the image begin, the amount of fixup work required is minimized.
+ // If it is pic we will retry with error_msg for the failure case. Pass a null error_msg to
+ // avoid reading proc maps for a mapping failure and slowing everything down.
map.reset(LoadImageFile(image_filename,
image_location,
*image_header,
image_header->GetImageBegin(),
file->Fd(),
logger,
- error_msg));
+ image_header->IsPic() ? nullptr : error_msg));
// If the header specifies PIC mode, we can also map at a random low_4gb address since we can
// relocate in-place.
if (map == nullptr && image_header->IsPic()) {
@@ -689,7 +692,7 @@ class ImageSpaceLoader {
if (validate_oat_file) {
TimingLogger::ScopedTiming timing("ValidateOatFile", &logger);
CHECK(space->oat_file_ != nullptr);
- if (!ValidateOatFile(*space, *space->oat_file_, error_msg)) {
+ if (!ImageSpace::ValidateOatFile(*space->oat_file_, error_msg)) {
DCHECK(!error_msg->empty());
return nullptr;
}
@@ -765,8 +768,10 @@ class ImageSpaceLoader {
if (storage_mode != ImageHeader::kStorageModeLZ4 &&
storage_mode != ImageHeader::kStorageModeLZ4HC) {
- *error_msg = StringPrintf("Invalid storage mode in image header %d",
- static_cast<int>(storage_mode));
+ if (error_msg != nullptr) {
+ *error_msg = StringPrintf("Invalid storage mode in image header %d",
+ static_cast<int>(storage_mode));
+ }
return nullptr;
}
@@ -790,7 +795,7 @@ class ImageSpaceLoader {
image_filename,
error_msg));
if (temp_map == nullptr) {
- DCHECK(!error_msg->empty());
+ DCHECK(error_msg == nullptr || !error_msg->empty());
return nullptr;
}
memcpy(map->Begin(), &image_header, sizeof(ImageHeader));
@@ -802,12 +807,18 @@ class ImageSpaceLoader {
reinterpret_cast<char*>(map->Begin()) + decompress_offset,
stored_size,
map->Size() - decompress_offset);
- VLOG(image) << "Decompressing image took " << PrettyDuration(NanoTime() - start);
+ const uint64_t time = NanoTime() - start;
+ // Add one 1 ns to prevent possible divide by 0.
+ VLOG(image) << "Decompressing image took " << PrettyDuration(time) << " ("
+ << PrettySize(static_cast<uint64_t>(map->Size()) * MsToNs(1000) / (time + 1))
+ << "/s)";
if (decompressed_size + sizeof(ImageHeader) != image_header.GetImageSize()) {
- *error_msg = StringPrintf(
- "Decompressed size does not match expected image size %zu vs %zu",
- decompressed_size + sizeof(ImageHeader),
- image_header.GetImageSize());
+ if (error_msg != nullptr) {
+ *error_msg = StringPrintf(
+ "Decompressed size does not match expected image size %zu vs %zu",
+ decompressed_size + sizeof(ImageHeader),
+ image_header.GetImageSize());
+ }
return nullptr;
}
}
@@ -1272,6 +1283,14 @@ class ImageSpaceLoader {
}
dex_cache->FixupResolvedMethodTypes<kWithoutReadBarrier>(new_method_types, fixup_adapter);
}
+ GcRoot<mirror::CallSite>* call_sites = dex_cache->GetResolvedCallSites();
+ if (call_sites != nullptr) {
+ GcRoot<mirror::CallSite>* new_call_sites = fixup_adapter.ForwardObject(call_sites);
+ if (call_sites != new_call_sites) {
+ dex_cache->SetResolvedCallSites(new_call_sites);
+ }
+ dex_cache->FixupResolvedCallSites<kWithoutReadBarrier>(new_call_sites, fixup_adapter);
+ }
}
}
{
@@ -1368,33 +1387,6 @@ class ImageSpaceLoader {
return oat_file;
}
-
- static bool ValidateOatFile(const ImageSpace& space,
- const OatFile& oat_file,
- std::string* error_msg) {
- for (const OatFile::OatDexFile* oat_dex_file : oat_file.GetOatDexFiles()) {
- const std::string& dex_file_location = oat_dex_file->GetDexFileLocation();
- uint32_t dex_file_location_checksum;
- if (!DexFile::GetChecksum(dex_file_location.c_str(), &dex_file_location_checksum, error_msg)) {
- *error_msg = StringPrintf("Failed to get checksum of dex file '%s' referenced by image %s: "
- "%s",
- dex_file_location.c_str(),
- space.GetName(),
- error_msg->c_str());
- return false;
- }
- if (dex_file_location_checksum != oat_dex_file->GetDexFileLocationChecksum()) {
- *error_msg = StringPrintf("ValidateOatFile found checksum mismatch between oat file '%s' and "
- "dex file '%s' (0x%x != 0x%x)",
- oat_file.GetLocation().c_str(),
- dex_file_location.c_str(),
- oat_dex_file->GetDexFileLocationChecksum(),
- dex_file_location_checksum);
- return false;
- }
- }
- return true;
- }
};
static constexpr uint64_t kLowSpaceValue = 50 * MB;
@@ -1771,6 +1763,63 @@ std::string ImageSpace::GetMultiImageBootClassPath(
return bootcp_oss.str();
}
+bool ImageSpace::ValidateOatFile(const OatFile& oat_file, std::string* error_msg) {
+ for (const OatFile::OatDexFile* oat_dex_file : oat_file.GetOatDexFiles()) {
+ const std::string& dex_file_location = oat_dex_file->GetDexFileLocation();
+
+ // Skip multidex locations - These will be checked when we visit their
+ // corresponding primary non-multidex location.
+ if (DexFile::IsMultiDexLocation(dex_file_location.c_str())) {
+ continue;
+ }
+
+ std::vector<uint32_t> checksums;
+ if (!DexFile::GetMultiDexChecksums(dex_file_location.c_str(), &checksums, error_msg)) {
+ *error_msg = StringPrintf("ValidateOatFile failed to get checksums of dex file '%s' "
+ "referenced by oat file %s: %s",
+ dex_file_location.c_str(),
+ oat_file.GetLocation().c_str(),
+ error_msg->c_str());
+ return false;
+ }
+ CHECK(!checksums.empty());
+ if (checksums[0] != oat_dex_file->GetDexFileLocationChecksum()) {
+ *error_msg = StringPrintf("ValidateOatFile found checksum mismatch between oat file "
+ "'%s' and dex file '%s' (0x%x != 0x%x)",
+ oat_file.GetLocation().c_str(),
+ dex_file_location.c_str(),
+ oat_dex_file->GetDexFileLocationChecksum(),
+ checksums[0]);
+ return false;
+ }
+
+ // Verify checksums for any related multidex entries.
+ for (size_t i = 1; i < checksums.size(); i++) {
+ std::string multi_dex_location = DexFile::GetMultiDexLocation(i, dex_file_location.c_str());
+ const OatFile::OatDexFile* multi_dex = oat_file.GetOatDexFile(multi_dex_location.c_str(),
+ nullptr,
+ error_msg);
+ if (multi_dex == nullptr) {
+ *error_msg = StringPrintf("ValidateOatFile oat file '%s' is missing entry '%s'",
+ oat_file.GetLocation().c_str(),
+ multi_dex_location.c_str());
+ return false;
+ }
+
+ if (checksums[i] != multi_dex->GetDexFileLocationChecksum()) {
+ *error_msg = StringPrintf("ValidateOatFile found checksum mismatch between oat file "
+ "'%s' and dex file '%s' (0x%x != 0x%x)",
+ oat_file.GetLocation().c_str(),
+ multi_dex_location.c_str(),
+ multi_dex->GetDexFileLocationChecksum(),
+ checksums[i]);
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
void ImageSpace::ExtractMultiImageLocations(const std::string& input_image_file_name,
const std::string& boot_classpath,
std::vector<std::string>* image_file_names) {
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index 489a2890fe..199bbdd00a 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -131,6 +131,17 @@ class ImageSpace : public MemMapSpace {
const std::vector<const char*>& oat_filenames,
const std::vector<const char*>& image_filenames);
+ // Returns true if the dex checksums in the given oat file match the
+ // checksums of the original dex files on disk. This is intended to be used
+ // to validate the boot image oat file, which may contain dex entries from
+ // multiple different (possibly multidex) dex files on disk. Prefer the
+ // OatFileAssistant for validating regular app oat files because the
+ // OatFileAssistant caches dex checksums that are reused to check both the
+ // oat and odex file.
+ //
+ // This function is exposed for testing purposes.
+ static bool ValidateOatFile(const OatFile& oat_file, std::string* error_msg);
+
// Return the end of the image which includes non-heap objects such as ArtMethods and ArtFields.
uint8_t* GetImageEnd() const {
return Begin() + GetImageHeader().GetImageSize();
diff --git a/runtime/gc/space/image_space_test.cc b/runtime/gc/space/image_space_test.cc
new file mode 100644
index 0000000000..7a380746a1
--- /dev/null
+++ b/runtime/gc/space/image_space_test.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#include "dexopt_test.h"
+
+namespace art {
+namespace gc {
+namespace space {
+
+TEST_F(DexoptTest, ValidateOatFile) {
+ std::string dex1 = GetScratchDir() + "/Dex1.jar";
+ std::string multidex1 = GetScratchDir() + "/MultiDex1.jar";
+ std::string dex2 = GetScratchDir() + "/Dex2.jar";
+ std::string oat_location = GetScratchDir() + "/Oat.oat";
+
+ Copy(GetDexSrc1(), dex1);
+ Copy(GetMultiDexSrc1(), multidex1);
+ Copy(GetDexSrc2(), dex2);
+
+ std::string error_msg;
+ std::vector<std::string> args;
+ args.push_back("--dex-file=" + dex1);
+ args.push_back("--dex-file=" + multidex1);
+ args.push_back("--dex-file=" + dex2);
+ args.push_back("--oat-file=" + oat_location);
+ ASSERT_TRUE(OatFileAssistant::Dex2Oat(args, &error_msg)) << error_msg;
+
+ std::unique_ptr<OatFile> oat(OatFile::Open(oat_location.c_str(),
+ oat_location.c_str(),
+ nullptr,
+ nullptr,
+ false,
+ /*low_4gb*/false,
+ nullptr,
+ &error_msg));
+ ASSERT_TRUE(oat != nullptr) << error_msg;
+
+ // Originally all the dex checksums should be up to date.
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Invalidate the dex1 checksum.
+ Copy(GetDexSrc2(), dex1);
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+
+ // Restore the dex1 checksum.
+ Copy(GetDexSrc1(), dex1);
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Invalidate the non-main multidex checksum.
+ Copy(GetMultiDexSrc2(), multidex1);
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+
+ // Restore the multidex checksum.
+ Copy(GetMultiDexSrc1(), multidex1);
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Invalidate the dex2 checksum.
+ Copy(GetDexSrc1(), dex2);
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+
+ // Restore the dex2 checksum.
+ Copy(GetDexSrc2(), dex2);
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Replace the multidex file with a non-multidex file.
+ Copy(GetDexSrc1(), multidex1);
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+
+ // Restore the multidex file.
+ Copy(GetMultiDexSrc1(), multidex1);
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Replace dex1 with a multidex file.
+ Copy(GetMultiDexSrc1(), dex1);
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+
+ // Restore the dex1 file.
+ Copy(GetDexSrc1(), dex1);
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Remove the dex2 file.
+ EXPECT_EQ(0, unlink(dex2.c_str()));
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+
+ // Restore the dex2 file.
+ Copy(GetDexSrc2(), dex2);
+ EXPECT_TRUE(ImageSpace::ValidateOatFile(*oat, &error_msg)) << error_msg;
+
+ // Remove the multidex file.
+ EXPECT_EQ(0, unlink(multidex1.c_str()));
+ EXPECT_FALSE(ImageSpace::ValidateOatFile(*oat, &error_msg));
+}
+
+} // namespace space
+} // namespace gc
+} // namespace art
diff --git a/runtime/gc/space/space_create_test.cc b/runtime/gc/space/space_create_test.cc
index 7bc4dc40e4..ca5f306264 100644
--- a/runtime/gc/space/space_create_test.cc
+++ b/runtime/gc/space/space_create_test.cc
@@ -108,7 +108,7 @@ TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
&ptr1_bytes_allocated,
&ptr1_usable_size,
&ptr1_bytes_tl_bulk_allocated)));
- EXPECT_TRUE(ptr1.Get() != nullptr);
+ EXPECT_TRUE(ptr1 != nullptr);
EXPECT_LE(1U * MB, ptr1_bytes_allocated);
EXPECT_LE(1U * MB, ptr1_usable_size);
EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
@@ -126,7 +126,7 @@ TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
&ptr3_bytes_allocated,
&ptr3_usable_size,
&ptr3_bytes_tl_bulk_allocated)));
- EXPECT_TRUE(ptr3.Get() != nullptr);
+ EXPECT_TRUE(ptr3 != nullptr);
EXPECT_LE(8U * MB, ptr3_bytes_allocated);
EXPECT_LE(8U * MB, ptr3_usable_size);
EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
@@ -154,7 +154,7 @@ TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
&ptr6_bytes_allocated,
&ptr6_usable_size,
&ptr6_bytes_tl_bulk_allocated)));
- EXPECT_TRUE(ptr6.Get() != nullptr);
+ EXPECT_TRUE(ptr6 != nullptr);
EXPECT_LE(9U * MB, ptr6_bytes_allocated);
EXPECT_LE(9U * MB, ptr6_usable_size);
EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
@@ -193,7 +193,7 @@ TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
&ptr1_bytes_allocated,
&ptr1_usable_size,
&ptr1_bytes_tl_bulk_allocated));
- EXPECT_TRUE(ptr1.Get() != nullptr);
+ EXPECT_TRUE(ptr1 != nullptr);
EXPECT_LE(1U * MB, ptr1_bytes_allocated);
EXPECT_LE(1U * MB, ptr1_usable_size);
EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
@@ -210,7 +210,7 @@ TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
&ptr3_bytes_allocated,
&ptr3_usable_size,
&ptr3_bytes_tl_bulk_allocated));
- EXPECT_TRUE(ptr3.Get() != nullptr);
+ EXPECT_TRUE(ptr3 != nullptr);
EXPECT_LE(2U * MB, ptr3_bytes_allocated);
EXPECT_LE(2U * MB, ptr3_usable_size);
EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
@@ -242,7 +242,7 @@ TEST_P(SpaceCreateTest, AllocAndFreeTestBody) {
&ptr1_bytes_allocated,
&ptr1_usable_size,
&ptr1_bytes_tl_bulk_allocated)));
- EXPECT_TRUE(ptr1.Get() != nullptr);
+ EXPECT_TRUE(ptr1 != nullptr);
EXPECT_LE(1U * MB, ptr1_bytes_allocated);
EXPECT_LE(1U * MB, ptr1_usable_size);
EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
@@ -260,7 +260,7 @@ TEST_P(SpaceCreateTest, AllocAndFreeTestBody) {
&ptr3_bytes_allocated,
&ptr3_usable_size,
&ptr3_bytes_tl_bulk_allocated)));
- EXPECT_TRUE(ptr3.Get() != nullptr);
+ EXPECT_TRUE(ptr3 != nullptr);
EXPECT_LE(8U * MB, ptr3_bytes_allocated);
EXPECT_LE(8U * MB, ptr3_usable_size);
EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
@@ -288,7 +288,7 @@ TEST_P(SpaceCreateTest, AllocAndFreeTestBody) {
&ptr6_bytes_allocated,
&ptr6_usable_size,
&ptr6_bytes_tl_bulk_allocated)));
- EXPECT_TRUE(ptr6.Get() != nullptr);
+ EXPECT_TRUE(ptr6 != nullptr);
EXPECT_LE(9U * MB, ptr6_bytes_allocated);
EXPECT_LE(9U * MB, ptr6_usable_size);
EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
diff --git a/runtime/gc/space/space_test.h b/runtime/gc/space/space_test.h
index cbb3d73497..1fe3fb2e86 100644
--- a/runtime/gc/space/space_test.h
+++ b/runtime/gc/space/space_test.h
@@ -200,7 +200,7 @@ void SpaceTest<Super>::SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space,
}
footprint = space->GetFootprint();
EXPECT_GE(space->Size(), footprint); // invariant
- if (object.Get() != nullptr) { // allocation succeeded
+ if (object != nullptr) { // allocation succeeded
lots_of_objects[i] = object.Get();
size_t allocation_size = space->AllocationSize(object.Get(), nullptr);
EXPECT_EQ(bytes_allocated, allocation_size);
@@ -296,7 +296,7 @@ void SpaceTest<Super>::SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space,
large_object.Assign(AllocWithGrowth(space, self, three_quarters_space, &bytes_allocated,
nullptr, &bytes_tl_bulk_allocated));
}
- EXPECT_TRUE(large_object.Get() != nullptr);
+ EXPECT_TRUE(large_object != nullptr);
// Sanity check footprint
footprint = space->GetFootprint();
diff --git a/runtime/gc/system_weak.h b/runtime/gc/system_weak.h
index e5cddfc6f9..60105f4e4f 100644
--- a/runtime/gc/system_weak.h
+++ b/runtime/gc/system_weak.h
@@ -82,7 +82,7 @@ class SystemWeakHolder : public AbstractSystemWeakHolder {
(kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(&allow_disallow_lock_);
new_weak_condition_.WaitHoldingLocks(self);
}
}
diff --git a/runtime/handle.h b/runtime/handle.h
index e4b6d29a55..ccff575495 100644
--- a/runtime/handle.h
+++ b/runtime/handle.h
@@ -81,6 +81,14 @@ class Handle : public ValueObject {
return reference_;
}
+ ALWAYS_INLINE bool operator!=(std::nullptr_t) const REQUIRES_SHARED(Locks::mutator_lock_) {
+ return !IsNull();
+ }
+
+ ALWAYS_INLINE bool operator==(std::nullptr_t) const REQUIRES_SHARED(Locks::mutator_lock_) {
+ return IsNull();
+ }
+
protected:
template<typename S>
explicit Handle(StackReference<S>* reference)
diff --git a/runtime/imtable_test.cc b/runtime/imtable_test.cc
index 8cbe2916ec..17149dfe44 100644
--- a/runtime/imtable_test.cc
+++ b/runtime/imtable_test.cc
@@ -53,7 +53,7 @@ class ImTableTest : public CommonRuntimeTest {
ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(jclass_loader_a)));
Handle<mirror::Class> h_class_a(
hs.NewHandle(class_linker->FindClass(self, class_name.c_str(), h_class_loader)));
- if (h_class_a.Get() == nullptr) {
+ if (h_class_a == nullptr) {
LOG(ERROR) << self->GetException()->Dump();
CHECK(false) << "h_class_a == nullptr";
}
@@ -63,7 +63,7 @@ class ImTableTest : public CommonRuntimeTest {
ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(jclass_loader_b)));
Handle<mirror::Class> h_class_b(
hs.NewHandle(class_linker->FindClass(self, class_name.c_str(), h_class_loader)));
- if (h_class_b.Get() == nullptr) {
+ if (h_class_b == nullptr) {
LOG(ERROR) << self->GetException()->Dump();
CHECK(false) << "h_class_b == nullptr";
}
diff --git a/runtime/indirect_reference_table_test.cc b/runtime/indirect_reference_table_test.cc
index bf4cab24cc..6aefe239a9 100644
--- a/runtime/indirect_reference_table_test.cc
+++ b/runtime/indirect_reference_table_test.cc
@@ -64,13 +64,13 @@ TEST_F(IndirectReferenceTableTest, BasicTest) {
StackHandleScope<4> hs(soa.Self());
ASSERT_TRUE(c != nullptr);
Handle<mirror::Object> obj0 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj0.Get() != nullptr);
+ ASSERT_TRUE(obj0 != nullptr);
Handle<mirror::Object> obj1 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj1.Get() != nullptr);
+ ASSERT_TRUE(obj1 != nullptr);
Handle<mirror::Object> obj2 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj2.Get() != nullptr);
+ ASSERT_TRUE(obj2 != nullptr);
Handle<mirror::Object> obj3 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj3.Get() != nullptr);
+ ASSERT_TRUE(obj3 != nullptr);
const IRTSegmentState cookie = kIRTFirstSegment;
@@ -282,15 +282,15 @@ TEST_F(IndirectReferenceTableTest, Holes) {
StackHandleScope<5> hs(soa.Self());
ASSERT_TRUE(c != nullptr);
Handle<mirror::Object> obj0 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj0.Get() != nullptr);
+ ASSERT_TRUE(obj0 != nullptr);
Handle<mirror::Object> obj1 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj1.Get() != nullptr);
+ ASSERT_TRUE(obj1 != nullptr);
Handle<mirror::Object> obj2 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj2.Get() != nullptr);
+ ASSERT_TRUE(obj2 != nullptr);
Handle<mirror::Object> obj3 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj3.Get() != nullptr);
+ ASSERT_TRUE(obj3 != nullptr);
Handle<mirror::Object> obj4 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj4.Get() != nullptr);
+ ASSERT_TRUE(obj4 != nullptr);
std::string error_msg;
@@ -491,7 +491,7 @@ TEST_F(IndirectReferenceTableTest, Resize) {
StackHandleScope<1> hs(soa.Self());
ASSERT_TRUE(c != nullptr);
Handle<mirror::Object> obj0 = hs.NewHandle(c->AllocObject(soa.Self()));
- ASSERT_TRUE(obj0.Get() != nullptr);
+ ASSERT_TRUE(obj0 != nullptr);
std::string error_msg;
IndirectReferenceTable irt(kTableMax,
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index f11e2cba10..d862ff2708 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -1010,15 +1010,18 @@ void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_o
void Instrumentation::ExceptionCaughtEvent(Thread* thread,
mirror::Throwable* exception_object) const {
+ Thread* self = Thread::Current();
+ StackHandleScope<1> hs(self);
+ Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
if (HasExceptionCaughtListeners()) {
- DCHECK_EQ(thread->GetException(), exception_object);
+ DCHECK_EQ(thread->GetException(), h_exception.Get());
thread->ClearException();
for (InstrumentationListener* listener : exception_caught_listeners_) {
if (listener != nullptr) {
- listener->ExceptionCaught(thread, exception_object);
+ listener->ExceptionCaught(thread, h_exception.Get());
}
}
- thread->SetException(exception_object);
+ thread->SetException(h_exception.Get());
}
}
diff --git a/runtime/intern_table_test.cc b/runtime/intern_table_test.cc
index 3991d6550d..f0d0260482 100644
--- a/runtime/intern_table_test.cc
+++ b/runtime/intern_table_test.cc
@@ -36,10 +36,10 @@ TEST_F(InternTableTest, Intern) {
Handle<mirror::String> foo_3(
hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "foo")));
Handle<mirror::String> bar(hs.NewHandle(intern_table.InternStrong(3, "bar")));
- ASSERT_TRUE(foo_1.Get() != nullptr);
- ASSERT_TRUE(foo_2.Get() != nullptr);
- ASSERT_TRUE(foo_3.Get() != nullptr);
- ASSERT_TRUE(bar.Get() != nullptr);
+ ASSERT_TRUE(foo_1 != nullptr);
+ ASSERT_TRUE(foo_2 != nullptr);
+ ASSERT_TRUE(foo_3 != nullptr);
+ ASSERT_TRUE(bar != nullptr);
EXPECT_EQ(foo_1.Get(), foo_2.Get());
EXPECT_TRUE(foo_1->Equals("foo"));
EXPECT_TRUE(foo_2->Equals("foo"));
@@ -204,9 +204,9 @@ TEST_F(InternTableTest, LookupStrong) {
Handle<mirror::String> foo(hs.NewHandle(intern_table.InternStrong(3, "foo")));
Handle<mirror::String> bar(hs.NewHandle(intern_table.InternStrong(3, "bar")));
Handle<mirror::String> foobar(hs.NewHandle(intern_table.InternStrong(6, "foobar")));
- ASSERT_TRUE(foo.Get() != nullptr);
- ASSERT_TRUE(bar.Get() != nullptr);
- ASSERT_TRUE(foobar.Get() != nullptr);
+ ASSERT_TRUE(foo != nullptr);
+ ASSERT_TRUE(bar != nullptr);
+ ASSERT_TRUE(foobar != nullptr);
ASSERT_TRUE(foo->Equals("foo"));
ASSERT_TRUE(bar->Equals("bar"));
ASSERT_TRUE(foobar->Equals("foobar"));
diff --git a/runtime/interpreter/interpreter_common.cc b/runtime/interpreter/interpreter_common.cc
index 28bcb97105..8978bfd5af 100644
--- a/runtime/interpreter/interpreter_common.cc
+++ b/runtime/interpreter/interpreter_common.cc
@@ -519,7 +519,7 @@ void SetStringInitValueToAllAliases(ShadowFrame* shadow_frame,
}
}
-template<bool is_range, bool do_access_check>
+template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
ShadowFrame& shadow_frame,
const Instruction* inst,
@@ -539,10 +539,10 @@ bool DoInvokePolymorphic(Thread* self,
// was symbolically invoked in bytecode (say MethodHandle.invoke or MethodHandle.invokeExact)
// and not the method that we'll dispatch to in the end.
StackHandleScope<5> hs(self);
- Handle<mirror::MethodHandleImpl> method_handle(hs.NewHandle(
- ObjPtr<mirror::MethodHandleImpl>::DownCast(
+ Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
+ ObjPtr<mirror::MethodHandle>::DownCast(
MakeObjPtr(shadow_frame.GetVRegReference(vRegC)))));
- if (UNLIKELY(method_handle.Get() == nullptr)) {
+ if (UNLIKELY(method_handle == nullptr)) {
// Note that the invoke type is kVirtual here because a call to a signature
// polymorphic method is shaped like a virtual call at the bytecode level.
ThrowNullPointerExceptionForMethodAccess(invoke_method_idx, InvokeType::kVirtual);
@@ -564,7 +564,7 @@ bool DoInvokePolymorphic(Thread* self,
hs.NewHandle<mirror::ClassLoader>(caller_class->GetClassLoader()))));
// This implies we couldn't resolve one or more types in this method handle.
- if (UNLIKELY(callsite_type.Get() == nullptr)) {
+ if (UNLIKELY(callsite_type == nullptr)) {
CHECK(self->IsExceptionPending());
return false;
}
@@ -584,31 +584,300 @@ bool DoInvokePolymorphic(Thread* self,
// VRegC is the register holding the method handle. Arguments passed
// to the method handle's target do not include the method handle.
uint32_t first_arg = inst->VRegC_4rcc() + 1;
- return DoInvokePolymorphic<is_range, do_access_check>(self,
- invoke_method,
- shadow_frame,
- method_handle,
- callsite_type,
- args /* unused */,
- first_arg,
- result);
+ return DoInvokePolymorphic<is_range>(self,
+ invoke_method,
+ shadow_frame,
+ method_handle,
+ callsite_type,
+ args /* unused */,
+ first_arg,
+ result);
} else {
// Get the register arguments for the invoke.
inst->GetVarArgs(args, inst_data);
// Drop the first register which is the method handle performing the invoke.
memmove(args, args + 1, sizeof(args[0]) * (Instruction::kMaxVarArgRegs - 1));
args[Instruction::kMaxVarArgRegs - 1] = 0;
- return DoInvokePolymorphic<is_range, do_access_check>(self,
- invoke_method,
- shadow_frame,
- method_handle,
- callsite_type,
- args,
- args[0],
- result);
+ return DoInvokePolymorphic<is_range>(self,
+ invoke_method,
+ shadow_frame,
+ method_handle,
+ callsite_type,
+ args,
+ args[0],
+ result);
}
}
+static ObjPtr<mirror::CallSite> InvokeBootstrapMethod(Thread* self,
+ ShadowFrame& shadow_frame,
+ uint32_t call_site_idx)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* referrer = shadow_frame.GetMethod();
+ const DexFile* dex_file = referrer->GetDexFile();
+ const DexFile::CallSiteIdItem& csi = dex_file->GetCallSiteId(call_site_idx);
+
+ StackHandleScope<9> hs(self);
+ Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
+ Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
+
+ CallSiteArrayValueIterator it(*dex_file, csi);
+ uint32_t method_handle_idx = static_cast<uint32_t>(it.GetJavaValue().i);
+ ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+ Handle<mirror::MethodHandle>
+ bootstrap(hs.NewHandle(class_linker->ResolveMethodHandle(method_handle_idx, referrer)));
+ if (bootstrap.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ Handle<mirror::MethodType> bootstrap_method_type = hs.NewHandle(bootstrap->GetMethodType());
+ it.Next();
+
+ DCHECK_EQ(static_cast<size_t>(bootstrap->GetMethodType()->GetPTypes()->GetLength()), it.Size());
+ const size_t num_bootstrap_vregs = bootstrap->GetMethodType()->NumberOfVRegs();
+
+ // Set-up a shadow frame for invoking the bootstrap method handle.
+ ShadowFrameAllocaUniquePtr bootstrap_frame =
+ CREATE_SHADOW_FRAME(num_bootstrap_vregs, nullptr, referrer, shadow_frame.GetDexPC());
+ ScopedStackedShadowFramePusher pusher(
+ self, bootstrap_frame.get(), StackedShadowFrameType::kShadowFrameUnderConstruction);
+ size_t vreg = 0;
+
+ // The first parameter is a MethodHandles lookup instance.
+ {
+ Handle<mirror::Class> lookup_class(hs.NewHandle(bootstrap->GetTargetClass()));
+ ObjPtr<mirror::MethodHandlesLookup> lookup =
+ mirror::MethodHandlesLookup::Create(self, lookup_class);
+ if (lookup.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg++, lookup.Ptr());
+ }
+
+ // The second parameter is the name to lookup.
+ {
+ dex::StringIndex name_idx(static_cast<uint32_t>(it.GetJavaValue().i));
+ ObjPtr<mirror::String> name = class_linker->ResolveString(*dex_file, name_idx, dex_cache);
+ if (name.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg++, name.Ptr());
+ }
+ it.Next();
+
+ // The third parameter is the method type associated with the name.
+ uint32_t method_type_idx = static_cast<uint32_t>(it.GetJavaValue().i);
+ Handle<mirror::MethodType>
+ method_type(hs.NewHandle(class_linker->ResolveMethodType(*dex_file,
+ method_type_idx,
+ dex_cache,
+ class_loader)));
+ if (method_type.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg++, method_type.Get());
+ it.Next();
+
+ // Append remaining arguments (if any).
+ while (it.HasNext()) {
+ const jvalue& jvalue = it.GetJavaValue();
+ switch (it.GetValueType()) {
+ case EncodedArrayValueIterator::ValueType::kBoolean:
+ case EncodedArrayValueIterator::ValueType::kByte:
+ case EncodedArrayValueIterator::ValueType::kChar:
+ case EncodedArrayValueIterator::ValueType::kShort:
+ case EncodedArrayValueIterator::ValueType::kInt:
+ bootstrap_frame->SetVReg(vreg, jvalue.i);
+ vreg += 1;
+ break;
+ case EncodedArrayValueIterator::ValueType::kLong:
+ bootstrap_frame->SetVRegLong(vreg, jvalue.j);
+ vreg += 2;
+ break;
+ case EncodedArrayValueIterator::ValueType::kFloat:
+ bootstrap_frame->SetVRegFloat(vreg, jvalue.f);
+ vreg += 1;
+ break;
+ case EncodedArrayValueIterator::ValueType::kDouble:
+ bootstrap_frame->SetVRegDouble(vreg, jvalue.d);
+ vreg += 2;
+ break;
+ case EncodedArrayValueIterator::ValueType::kMethodType: {
+ uint32_t idx = static_cast<uint32_t>(jvalue.i);
+ ObjPtr<mirror::MethodType> ref =
+ class_linker->ResolveMethodType(*dex_file, idx, dex_cache, class_loader);
+ if (ref.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
+ vreg += 1;
+ break;
+ }
+ case EncodedArrayValueIterator::ValueType::kMethodHandle: {
+ uint32_t idx = static_cast<uint32_t>(jvalue.i);
+ ObjPtr<mirror::MethodHandle> ref =
+ class_linker->ResolveMethodHandle(idx, referrer);
+ if (ref.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
+ vreg += 1;
+ break;
+ }
+ case EncodedArrayValueIterator::ValueType::kString: {
+ dex::StringIndex idx(static_cast<uint32_t>(jvalue.i));
+ ObjPtr<mirror::String> ref = class_linker->ResolveString(*dex_file, idx, dex_cache);
+ if (ref.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
+ vreg += 1;
+ break;
+ }
+ case EncodedArrayValueIterator::ValueType::kType: {
+ dex::TypeIndex idx(static_cast<uint32_t>(jvalue.i));
+ ObjPtr<mirror::Class> ref =
+ class_linker->ResolveType(*dex_file, idx, dex_cache, class_loader);
+ if (ref.IsNull()) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+ bootstrap_frame->SetVRegReference(vreg, ref.Ptr());
+ vreg += 1;
+ break;
+ }
+ case EncodedArrayValueIterator::ValueType::kNull:
+ bootstrap_frame->SetVRegReference(vreg, nullptr);
+ vreg += 1;
+ break;
+ case EncodedArrayValueIterator::ValueType::kField:
+ case EncodedArrayValueIterator::ValueType::kMethod:
+ case EncodedArrayValueIterator::ValueType::kEnum:
+ case EncodedArrayValueIterator::ValueType::kArray:
+ case EncodedArrayValueIterator::ValueType::kAnnotation:
+ // Unreachable based on current EncodedArrayValueIterator::Next().
+ UNREACHABLE();
+ }
+
+ it.Next();
+ }
+
+ // Invoke the bootstrap method handle.
+ JValue result;
+
+ // This array of arguments is unused. DoInvokePolymorphic() operates on either
+ // an argument array or a range, but always takes an array argument.
+ uint32_t args_unused[Instruction::kMaxVarArgRegs];
+ ArtMethod* invoke_exact =
+ jni::DecodeArtMethod(WellKnownClasses::java_lang_invoke_MethodHandle_invokeExact);
+ bool invoke_success = DoInvokePolymorphic<true /* is_range */>(self,
+ invoke_exact,
+ *bootstrap_frame,
+ bootstrap,
+ bootstrap_method_type,
+ args_unused,
+ 0,
+ &result);
+ if (!invoke_success) {
+ DCHECK(self->IsExceptionPending());
+ return nullptr;
+ }
+
+ Handle<mirror::Object> object(hs.NewHandle(result.GetL()));
+
+ // Check the result is not null.
+ if (UNLIKELY(object.IsNull())) {
+ ThrowNullPointerException("CallSite == null");
+ return nullptr;
+ }
+
+ // Check the result type is a subclass of CallSite.
+ if (UNLIKELY(!object->InstanceOf(mirror::CallSite::StaticClass()))) {
+ ThrowClassCastException(object->GetClass(), mirror::CallSite::StaticClass());
+ return nullptr;
+ }
+
+ Handle<mirror::CallSite> call_site =
+ hs.NewHandle(ObjPtr<mirror::CallSite>::DownCast(ObjPtr<mirror::Object>(result.GetL())));
+
+ // Check the call site target is not null as we're going to invoke it.
+ Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
+ if (UNLIKELY(target.IsNull())) {
+ ThrowNullPointerException("CallSite target == null");
+ return nullptr;
+ }
+
+ // Check the target method type matches the method type requested.
+ if (UNLIKELY(!target->GetMethodType()->IsExactMatch(method_type.Get()))) {
+ ThrowWrongMethodTypeException(target->GetMethodType(), method_type.Get());
+ return nullptr;
+ }
+
+ return call_site.Get();
+}
+
+template<bool is_range>
+bool DoInvokeCustom(Thread* self,
+ ShadowFrame& shadow_frame,
+ const Instruction* inst,
+ uint16_t inst_data,
+ JValue* result)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ // invoke-custom is not supported in transactions. In transactions
+ // there is a limited set of types supported. invoke-custom allows
+ // running arbitrary code and instantiating arbitrary types.
+ CHECK(!Runtime::Current()->IsActiveTransaction());
+ StackHandleScope<4> hs(self);
+ Handle<mirror::DexCache> dex_cache(hs.NewHandle(shadow_frame.GetMethod()->GetDexCache()));
+ const uint32_t call_site_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
+ MutableHandle<mirror::CallSite>
+ call_site(hs.NewHandle(dex_cache->GetResolvedCallSite(call_site_idx)));
+ if (call_site.IsNull()) {
+ call_site.Assign(InvokeBootstrapMethod(self, shadow_frame, call_site_idx));
+ if (UNLIKELY(call_site.IsNull())) {
+ CHECK(self->IsExceptionPending());
+ ThrowWrappedBootstrapMethodError("Exception from call site #%u bootstrap method",
+ call_site_idx);
+ result->SetJ(0);
+ return false;
+ }
+ mirror::CallSite* winning_call_site =
+ dex_cache->SetResolvedCallSite(call_site_idx, call_site.Get());
+ call_site.Assign(winning_call_site);
+ }
+
+ // CallSite.java checks the re-assignment of the call site target
+ // when mutating call site targets. We only check the target is
+ // non-null and has the right type during bootstrap method execution.
+ Handle<mirror::MethodHandle> target = hs.NewHandle(call_site->GetTarget());
+ Handle<mirror::MethodType> target_method_type = hs.NewHandle(target->GetMethodType());
+ DCHECK_EQ(static_cast<size_t>(inst->VRegA()), target_method_type->NumberOfVRegs());
+
+ uint32_t args[Instruction::kMaxVarArgRegs];
+ if (is_range) {
+ args[0] = inst->VRegC_3rc();
+ } else {
+ inst->GetVarArgs(args, inst_data);
+ }
+
+ ArtMethod* invoke_exact =
+ jni::DecodeArtMethod(WellKnownClasses::java_lang_invoke_MethodHandle_invokeExact);
+ return DoInvokePolymorphic<is_range>(self,
+ invoke_exact,
+ shadow_frame,
+ target,
+ target_method_type,
+ args,
+ args[0],
+ result);
+}
+
template <bool is_range>
inline void CopyRegisters(ShadowFrame& caller_frame,
ShadowFrame* callee_frame,
@@ -975,17 +1244,24 @@ EXPLICIT_DO_CALL_TEMPLATE_DECL(true, false);
EXPLICIT_DO_CALL_TEMPLATE_DECL(true, true);
#undef EXPLICIT_DO_CALL_TEMPLATE_DECL
-// Explicit DoInvokePolymorphic template function declarations.
-#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range, _do_assignability_check) \
- template REQUIRES_SHARED(Locks::mutator_lock_) \
- bool DoInvokePolymorphic<_is_range, _do_assignability_check>( \
- Thread* self, ShadowFrame& shadow_frame, const Instruction* inst, \
+// Explicit DoInvokeCustom template function declarations.
+#define EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL(_is_range) \
+ template REQUIRES_SHARED(Locks::mutator_lock_) \
+ bool DoInvokeCustom<_is_range>( \
+ Thread* self, ShadowFrame& shadow_frame, const Instruction* inst, \
uint16_t inst_data, JValue* result)
+EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL(false);
+EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL(true);
+#undef EXPLICIT_DO_INVOKE_CUSTOM_TEMPLATE_DECL
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false, false);
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false, true);
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true, false);
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true, true);
+// Explicit DoInvokePolymorphic template function declarations.
+#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range) \
+ template REQUIRES_SHARED(Locks::mutator_lock_) \
+ bool DoInvokePolymorphic<_is_range>( \
+ Thread* self, ShadowFrame& shadow_frame, const Instruction* inst, \
+ uint16_t inst_data, JValue* result)
+EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
+EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL
// Explicit DoFilledNewArray template function declarations.
diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h
index 7ef3508164..6b22af9829 100644
--- a/runtime/interpreter/interpreter_common.h
+++ b/runtime/interpreter/interpreter_common.h
@@ -40,9 +40,11 @@
#include "entrypoints/entrypoint_utils-inl.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
+#include "mirror/call_site.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/method.h"
+#include "mirror/method_handles_lookup.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
@@ -154,13 +156,21 @@ static inline bool DoInvoke(Thread* self,
}
// Performs a signature polymorphic invoke (invoke-polymorphic/invoke-polymorphic-range).
-template<bool is_range, bool do_access_check>
+template<bool is_range>
bool DoInvokePolymorphic(Thread* self,
ShadowFrame& shadow_frame,
const Instruction* inst,
uint16_t inst_data,
JValue* result);
+// Performs a custom invoke (invoke-custom/invoke-custom-range).
+template<bool is_range>
+bool DoInvokeCustom(Thread* self,
+ ShadowFrame& shadow_frame,
+ const Instruction* inst,
+ uint16_t inst_data,
+ JValue* result);
+
// Handles invoke-virtual-quick and invoke-virtual-quick-range instructions.
// Returns true on success, otherwise throws an exception and returns false.
template<bool is_range>
diff --git a/runtime/interpreter/interpreter_switch_impl.cc b/runtime/interpreter/interpreter_switch_impl.cc
index a77a3fc2b3..b191dd79a1 100644
--- a/runtime/interpreter/interpreter_switch_impl.cc
+++ b/runtime/interpreter/interpreter_switch_impl.cc
@@ -1524,7 +1524,7 @@ JValue ExecuteSwitchImpl(Thread* self, const DexFile::CodeItem* code_item,
case Instruction::INVOKE_POLYMORPHIC: {
PREAMBLE();
DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
- bool success = DoInvokePolymorphic<false, do_access_check>(
+ bool success = DoInvokePolymorphic<false /* is_range */>(
self, shadow_frame, inst, inst_data, &result_register);
POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_4xx);
break;
@@ -1532,11 +1532,27 @@ JValue ExecuteSwitchImpl(Thread* self, const DexFile::CodeItem* code_item,
case Instruction::INVOKE_POLYMORPHIC_RANGE: {
PREAMBLE();
DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
- bool success = DoInvokePolymorphic<true, do_access_check>(
+ bool success = DoInvokePolymorphic<true /* is_range */>(
self, shadow_frame, inst, inst_data, &result_register);
POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_4xx);
break;
}
+ case Instruction::INVOKE_CUSTOM: {
+ PREAMBLE();
+ DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
+ bool success = DoInvokeCustom<false /* is_range */>(
+ self, shadow_frame, inst, inst_data, &result_register);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_3xx);
+ break;
+ }
+ case Instruction::INVOKE_CUSTOM_RANGE: {
+ PREAMBLE();
+ DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
+ bool success = DoInvokeCustom<true /* is_range */>(
+ self, shadow_frame, inst, inst_data, &result_register);
+ POSSIBLY_HANDLE_PENDING_EXCEPTION(!success, Next_3xx);
+ break;
+ }
case Instruction::NEG_INT:
PREAMBLE();
shadow_frame.SetVReg(
@@ -2315,7 +2331,7 @@ JValue ExecuteSwitchImpl(Thread* self, const DexFile::CodeItem* code_item,
break;
case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
case Instruction::UNUSED_F3 ... Instruction::UNUSED_F9:
- case Instruction::UNUSED_FC ... Instruction::UNUSED_FF:
+ case Instruction::UNUSED_FE ... Instruction::UNUSED_FF:
case Instruction::UNUSED_79:
case Instruction::UNUSED_7A:
UnexpectedOpcode(inst, shadow_frame);
diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc
index 545cc1ad42..c7e84420d3 100644
--- a/runtime/interpreter/unstarted_runtime.cc
+++ b/runtime/interpreter/unstarted_runtime.cc
@@ -124,7 +124,7 @@ static void UnstartedRuntimeFindClass(Thread* self, Handle<mirror::String> class
const std::string& method_name, bool initialize_class,
bool abort_if_not_found)
REQUIRES_SHARED(Locks::mutator_lock_) {
- CHECK(className.Get() != nullptr);
+ CHECK(className != nullptr);
std::string descriptor(DotToDescriptor(className->ToModifiedUtf8().c_str()));
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
@@ -239,7 +239,7 @@ void UnstartedRuntime::UnstartedClassNewInstance(
Handle<mirror::Class> h_klass(hs.NewHandle(klass));
// Check that it's not null.
- if (h_klass.Get() == nullptr) {
+ if (h_klass == nullptr) {
AbortTransactionOrFail(self, "Class reference is null for newInstance");
return;
}
@@ -263,7 +263,7 @@ void UnstartedRuntime::UnstartedClassNewInstance(
auto* cons = h_klass->FindDeclaredDirectMethod("<init>", "()V", cl->GetImagePointerSize());
if (cons != nullptr) {
Handle<mirror::Object> h_obj(hs.NewHandle(klass->AllocObject(self)));
- CHECK(h_obj.Get() != nullptr); // We don't expect OOM at compile-time.
+ CHECK(h_obj != nullptr); // We don't expect OOM at compile-time.
EnterInterpreterFromInvoke(self, cons, h_obj.Get(), nullptr, nullptr);
if (!self->IsExceptionPending()) {
result->SetL(h_obj.Get());
@@ -542,7 +542,7 @@ static void GetResourceAsStream(Thread* self,
// Create byte array for content.
Handle<mirror::ByteArray> h_array(hs.NewHandle(mirror::ByteArray::Alloc(self, map_size)));
- if (h_array.Get() == nullptr) {
+ if (h_array == nullptr) {
AbortTransactionOrFail(self, "Could not find/create byte array class");
return;
}
@@ -556,7 +556,7 @@ static void GetResourceAsStream(Thread* self,
runtime->GetClassLinker()->FindClass(self,
"Ljava/io/ByteArrayInputStream;",
ScopedNullHandle<mirror::ClassLoader>())));
- if (h_class.Get() == nullptr) {
+ if (h_class == nullptr) {
AbortTransactionOrFail(self, "Could not find ByteArrayInputStream class");
return;
}
@@ -566,7 +566,7 @@ static void GetResourceAsStream(Thread* self,
}
Handle<mirror::Object> h_obj(hs.NewHandle(h_class->AllocObject(self)));
- if (h_obj.Get() == nullptr) {
+ if (h_obj == nullptr) {
AbortTransactionOrFail(self, "Could not allocate ByteArrayInputStream object");
return;
}
@@ -800,7 +800,7 @@ static void GetSystemProperty(Thread* self,
StackHandleScope<4> hs(self);
Handle<mirror::String> h_key(
hs.NewHandle(reinterpret_cast<mirror::String*>(shadow_frame->GetVRegReference(arg_offset))));
- if (h_key.Get() == nullptr) {
+ if (h_key == nullptr) {
AbortTransactionOrFail(self, "getProperty key was null");
return;
}
@@ -815,7 +815,7 @@ static void GetSystemProperty(Thread* self,
class_linker->FindClass(self,
"Ljava/lang/AndroidHardcodedSystemProperties;",
ScopedNullHandle<mirror::ClassLoader>())));
- if (h_props_class.Get() == nullptr) {
+ if (h_props_class == nullptr) {
AbortTransactionOrFail(self, "Could not find AndroidHardcodedSystemProperties");
return;
}
@@ -837,7 +837,7 @@ static void GetSystemProperty(Thread* self,
ObjPtr<mirror::Object> props = static_properties->GetObject(h_props_class.Get());
Handle<mirror::ObjectArray<mirror::ObjectArray<mirror::String>>> h_2string_array(hs.NewHandle(
props->AsObjectArray<mirror::ObjectArray<mirror::String>>()));
- if (h_2string_array.Get() == nullptr) {
+ if (h_2string_array == nullptr) {
AbortTransactionOrFail(self, "Field %s is null", kAndroidHardcodedSystemPropertiesFieldName);
return;
}
@@ -849,7 +849,7 @@ static void GetSystemProperty(Thread* self,
hs.NewHandle<mirror::ObjectArray<mirror::String>>(nullptr));
for (int32_t i = 0; i < prop_count; ++i) {
h_string_array.Assign(h_2string_array->Get(i));
- if (h_string_array.Get() == nullptr ||
+ if (h_string_array == nullptr ||
h_string_array->GetLength() != 2 ||
h_string_array->Get(0) == nullptr) {
AbortTransactionOrFail(self,
@@ -924,7 +924,7 @@ static ObjPtr<mirror::Object> CreateInstanceOf(Thread* self, const char* class_d
StackHandleScope<2> hs(self);
Handle<mirror::Class> h_class(hs.NewHandle(klass));
Handle<mirror::Object> h_obj(hs.NewHandle(h_class->AllocObject(self)));
- if (h_obj.Get() != nullptr) {
+ if (h_obj != nullptr) {
ArtMethod* init_method = h_class->FindDirectMethod(
"<init>", "()V", class_linker->GetImagePointerSize());
if (init_method == nullptr) {
diff --git a/runtime/interpreter/unstarted_runtime_test.cc b/runtime/interpreter/unstarted_runtime_test.cc
index 31be587e9c..16b7b55e00 100644
--- a/runtime/interpreter/unstarted_runtime_test.cc
+++ b/runtime/interpreter/unstarted_runtime_test.cc
@@ -960,7 +960,7 @@ TEST_F(UnstartedRuntimeTest, ThreadLocalGet) {
class_linker->FindClass(self,
"Lsun/misc/FloatingDecimal;",
ScopedNullHandle<mirror::ClassLoader>()));
- ASSERT_TRUE(floating_decimal.Get() != nullptr);
+ ASSERT_TRUE(floating_decimal != nullptr);
ASSERT_TRUE(class_linker->EnsureInitialized(self, floating_decimal, true, true));
ArtMethod* caller_method = floating_decimal->FindDeclaredDirectMethod(
@@ -1014,7 +1014,7 @@ TEST_F(UnstartedRuntimeTest, FloatConversion) {
class_linker->FindClass(self,
"Ljava/lang/Double;",
ScopedNullHandle<mirror::ClassLoader>()));
- ASSERT_TRUE(double_class.Get() != nullptr);
+ ASSERT_TRUE(double_class != nullptr);
ASSERT_TRUE(class_linker->EnsureInitialized(self, double_class, true, true));
ArtMethod* method = double_class->FindDeclaredDirectMethod("toString",
diff --git a/runtime/java_vm_ext.cc b/runtime/java_vm_ext.cc
index e0f28adc4f..a341cdb89f 100644
--- a/runtime/java_vm_ext.cc
+++ b/runtime/java_vm_ext.cc
@@ -572,7 +572,7 @@ jweak JavaVMExt::AddWeakGlobalRef(Thread* self, ObjPtr<mirror::Object> obj) {
while (!kUseReadBarrier && UNLIKELY(!MayAccessWeakGlobals(self))) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(Locks::jni_weak_globals_lock_);
weak_globals_add_condition_.WaitHoldingLocks(self);
}
IndirectRef ref = weak_globals_.Add(kIRTFirstSegment, obj);
@@ -706,7 +706,7 @@ ObjPtr<mirror::Object> JavaVMExt::DecodeWeakGlobalLocked(Thread* self, IndirectR
while (UNLIKELY(!MayAccessWeakGlobals(self))) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(Locks::jni_weak_globals_lock_);
weak_globals_add_condition_.WaitHoldingLocks(self);
}
return weak_globals_.Get(ref);
@@ -731,7 +731,7 @@ bool JavaVMExt::IsWeakGlobalCleared(Thread* self, IndirectRef ref) {
while (UNLIKELY(!MayAccessWeakGlobals(self))) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(Locks::jni_weak_globals_lock_);
weak_globals_add_condition_.WaitHoldingLocks(self);
}
// When just checking a weak ref has been cleared, avoid triggering the read barrier in decode
diff --git a/runtime/jdwp/jdwp.h b/runtime/jdwp/jdwp.h
index e5d34e1a2d..86af6d44db 100644
--- a/runtime/jdwp/jdwp.h
+++ b/runtime/jdwp/jdwp.h
@@ -22,6 +22,7 @@
#include "jdwp/jdwp_bits.h"
#include "jdwp/jdwp_constants.h"
#include "jdwp/jdwp_expand_buf.h"
+#include "obj_ptr.h"
#include <pthread.h>
#include <stddef.h>
@@ -286,6 +287,10 @@ struct JdwpState {
REQUIRES(!event_list_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
+ void UnregisterLocationEventsOnClass(ObjPtr<mirror::Class> klass)
+ REQUIRES(!event_list_lock_)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
/*
* Unregister all events.
*/
diff --git a/runtime/jdwp/jdwp_event.cc b/runtime/jdwp/jdwp_event.cc
index 172f52a974..96249f9b58 100644
--- a/runtime/jdwp/jdwp_event.cc
+++ b/runtime/jdwp/jdwp_event.cc
@@ -251,6 +251,43 @@ JdwpError JdwpState::RegisterEvent(JdwpEvent* pEvent) {
return ERR_NONE;
}
+void JdwpState::UnregisterLocationEventsOnClass(ObjPtr<mirror::Class> klass) {
+ VLOG(jdwp) << "Removing events within " << klass->PrettyClass();
+ StackHandleScope<1> hs(Thread::Current());
+ Handle<mirror::Class> h_klass(hs.NewHandle(klass));
+ std::vector<JdwpEvent*> to_remove;
+ MutexLock mu(Thread::Current(), event_list_lock_);
+ for (JdwpEvent* cur_event = event_list_; cur_event != nullptr; cur_event = cur_event->next) {
+ // Collect matching events into the to_remove list.
+ bool found_event = false;
+ for (int i = 0; i < cur_event->modCount && !found_event; i++) {
+ JdwpEventMod& mod = cur_event->mods[i];
+ switch (mod.modKind) {
+ case MK_LOCATION_ONLY: {
+ JdwpLocation& loc = mod.locationOnly.loc;
+ JdwpError error;
+ ObjPtr<mirror::Class> breakpoint_class(
+ Dbg::GetObjectRegistry()->Get<art::mirror::Class*>(loc.class_id, &error));
+ DCHECK_EQ(error, ERR_NONE);
+ if (breakpoint_class == h_klass.Get()) {
+ to_remove.push_back(cur_event);
+ found_event = true;
+ }
+ break;
+ }
+ default:
+ // TODO Investigate how we should handle non-locationOnly events.
+ break;
+ }
+ }
+ }
+
+ for (JdwpEvent* event : to_remove) {
+ UnregisterEvent(event);
+ EventFree(event);
+ }
+}
+
/*
* Remove an event from the list. This will also remove the event from
* any optimization tables, e.g. breakpoints.
diff --git a/runtime/jdwp/object_registry.cc b/runtime/jdwp/object_registry.cc
index 4615574947..bd7251baeb 100644
--- a/runtime/jdwp/object_registry.cc
+++ b/runtime/jdwp/object_registry.cc
@@ -57,7 +57,7 @@ JDWP::ObjectId ObjectRegistry::Add(ObjPtr<mirror::Object> o) {
// Template instantiations must be declared below.
template<class T>
JDWP::ObjectId ObjectRegistry::Add(Handle<T> obj_h) {
- if (obj_h.Get() == nullptr) {
+ if (obj_h == nullptr) {
return 0;
}
return InternalAdd(obj_h);
@@ -76,7 +76,7 @@ JDWP::ObjectId ObjectRegistry::Add(Handle<mirror::Throwable> obj_h);
template<class T>
JDWP::ObjectId ObjectRegistry::InternalAdd(Handle<T> obj_h) {
- CHECK(obj_h.Get() != nullptr);
+ CHECK(obj_h != nullptr);
Thread* const self = Thread::Current();
self->AssertNoPendingException();
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index f5151b588a..60ab275641 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -556,12 +556,13 @@ uint8_t* JitCodeCache::CommitCodeInternal(Thread* self,
// Flush data cache, as compiled code references literals in it.
FlushDataCache(reinterpret_cast<char*>(roots_data),
reinterpret_cast<char*>(roots_data + data_size));
- // Flush caches before we remove write permission because on some ARMv8 hardware,
- // flushing caches require write permissions.
+ // Flush caches before we remove write permission because some ARMv8 Qualcomm kernels may
+ // trigger a segfault if a page fault occurs when requesting a cache maintenance operation.
+ // This is a kernel bug that we need to work around until affected devices (e.g. Nexus 5X and
+ // 6P) stop being supported or their kernels are fixed.
//
- // For reference, here are kernel patches discussing about this issue:
- // https://android.googlesource.com/kernel/msm/%2B/0e7f7bcc3fc87489cda5aa6aff8ce40eed912279
- // https://patchwork.kernel.org/patch/9047921/
+ // For reference, this behavior is caused by this commit:
+ // https://android.googlesource.com/kernel/msm/+/3fbe6bc28a6b9939d0650f2f17eb5216c719950c
FlushInstructionCache(reinterpret_cast<char*>(code_ptr),
reinterpret_cast<char*>(code_ptr + code_size));
DCHECK(!Runtime::Current()->IsAotCompiler());
@@ -685,6 +686,10 @@ void JitCodeCache::NotifyMethodRedefined(ArtMethod* method) {
// shouldn't be used since it is no longer logically in the jit code cache.
// TODO We should add DCHECKS that validate that the JIT is paused when this method is entered.
void JitCodeCache::MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method) {
+ // Native methods have no profiling info and need no special handling from the JIT code cache.
+ if (old_method->IsNative()) {
+ return;
+ }
MutexLock mu(Thread::Current(), lock_);
// Update ProfilingInfo to the new one and remove it from the old_method.
if (old_method->GetProfilingInfo(kRuntimePointerSize) != nullptr) {
diff --git a/runtime/jit/profile_compilation_info.cc b/runtime/jit/profile_compilation_info.cc
index 9ba2d1a355..54fc0386e1 100644
--- a/runtime/jit/profile_compilation_info.cc
+++ b/runtime/jit/profile_compilation_info.cc
@@ -664,6 +664,38 @@ std::string ProfileCompilationInfo::DumpInfo(const std::vector<const DexFile*>*
return os.str();
}
+void ProfileCompilationInfo::GetClassNames(
+ const std::vector<std::unique_ptr<const DexFile>>* dex_files,
+ std::set<std::string>* class_names) const {
+ std::unique_ptr<const std::vector<const DexFile*>> non_owning_dex_files(
+ MakeNonOwningVector(dex_files));
+ GetClassNames(non_owning_dex_files.get(), class_names);
+}
+
+void ProfileCompilationInfo::GetClassNames(const std::vector<const DexFile*>* dex_files,
+ std::set<std::string>* class_names) const {
+ if (info_.empty()) {
+ return;
+ }
+ for (const auto& it : info_) {
+ const std::string& location = it.first;
+ const DexFileData& dex_data = it.second;
+ const DexFile* dex_file = nullptr;
+ if (dex_files != nullptr) {
+ for (size_t i = 0; i < dex_files->size(); i++) {
+ if (location == (*dex_files)[i]->GetLocation()) {
+ dex_file = (*dex_files)[i];
+ }
+ }
+ }
+ for (const auto class_it : dex_data.class_set) {
+ if (dex_file != nullptr) {
+ class_names->insert(std::string(dex_file->PrettyType(class_it)));
+ }
+ }
+ }
+}
+
bool ProfileCompilationInfo::Equals(const ProfileCompilationInfo& other) {
return info_.Equals(other.info_);
}
diff --git a/runtime/jit/profile_compilation_info.h b/runtime/jit/profile_compilation_info.h
index b1587c0070..758b46d74a 100644
--- a/runtime/jit/profile_compilation_info.h
+++ b/runtime/jit/profile_compilation_info.h
@@ -78,6 +78,11 @@ class ProfileCompilationInfo {
std::string DumpInfo(const std::vector<const DexFile*>* dex_files,
bool print_full_dex_location = true) const;
+ void GetClassNames(const std::vector<std::unique_ptr<const DexFile>>* dex_files,
+ std::set<std::string>* class_names) const;
+ void GetClassNames(const std::vector<const DexFile*>* dex_files,
+ std::set<std::string>* class_names) const;
+
bool Equals(const ProfileCompilationInfo& other);
static std::string GetProfileDexFileKey(const std::string& dex_location);
diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc
index 3c641b0b75..547b5b8a2d 100644
--- a/runtime/jni_internal.cc
+++ b/runtime/jni_internal.cc
@@ -196,7 +196,7 @@ static jfieldID FindFieldID(const ScopedObjectAccess& soa, jclass jni_class, con
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Class> c(
hs.NewHandle(EnsureInitialized(soa.Self(), soa.Decode<mirror::Class>(jni_class))));
- if (c.Get() == nullptr) {
+ if (c == nullptr) {
return nullptr;
}
ArtField* field = nullptr;
diff --git a/runtime/jobject_comparator.cc b/runtime/jobject_comparator.cc
index 443f095d05..4c45e3839b 100644
--- a/runtime/jobject_comparator.cc
+++ b/runtime/jobject_comparator.cc
@@ -34,9 +34,9 @@ bool JobjectComparator::operator()(jobject jobj1, jobject jobj2) const {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Object> obj1(hs.NewHandle(soa.Decode<mirror::Object>(jobj1)));
Handle<mirror::Object> obj2(hs.NewHandle(soa.Decode<mirror::Object>(jobj2)));
- if (obj1.Get() == nullptr) {
+ if (obj1 == nullptr) {
return true;
- } else if (obj2.Get() == nullptr) {
+ } else if (obj2 == nullptr) {
return false;
}
// Sort by class...
diff --git a/runtime/mem_map.cc b/runtime/mem_map.cc
index dce56b3c58..93c212bafb 100644
--- a/runtime/mem_map.cc
+++ b/runtime/mem_map.cc
@@ -400,7 +400,7 @@ MemMap* MemMap::MapFileAtAddress(uint8_t* expected_ptr,
// reuse means it is okay that it overlaps an existing page mapping.
// Only use this if you actually made the page reservation yourself.
CHECK(expected_ptr != nullptr);
-
+ DCHECK(error_msg != nullptr);
DCHECK(ContainedWithinExistingMap(expected_ptr, byte_count, error_msg))
<< ((error_msg != nullptr) ? *error_msg : std::string());
flags |= MAP_FIXED;
diff --git a/runtime/method_handles.cc b/runtime/method_handles.cc
index 99886e5c2f..6ecfd8c714 100644
--- a/runtime/method_handles.cc
+++ b/runtime/method_handles.cc
@@ -220,7 +220,7 @@ bool ConvertJValueCommon(
StackHandleScope<2> hs(Thread::Current());
Handle<mirror::Class> h_to(hs.NewHandle(to));
Handle<mirror::Object> h_obj(hs.NewHandle(src_value.GetL()));
- if (h_obj.Get() != nullptr && !to->IsAssignableFrom(h_obj->GetClass())) {
+ if (h_obj != nullptr && !to->IsAssignableFrom(h_obj->GetClass())) {
ThrowClassCastException(h_to.Get(), h_obj->GetClass());
return false;
}
@@ -555,7 +555,7 @@ static inline bool DoCallTransform(ArtMethod* called_method,
Handle<mirror::MethodType> callee_type,
Thread* self,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> receiver,
+ Handle<mirror::MethodHandle> receiver,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
JValue* result)
@@ -599,7 +599,7 @@ static inline bool DoCallTransform(ArtMethod* called_method,
// Something went wrong while creating the emulated stack frame, we should
// throw the pending exception.
- if (sf.Get() == nullptr) {
+ if (sf == nullptr) {
DCHECK(self->IsExceptionPending());
return false;
}
@@ -645,7 +645,7 @@ inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self,
template <bool is_range>
bool DoInvokePolymorphicUnchecked(Thread* self,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> method_handle,
+ Handle<mirror::MethodHandle> method_handle,
Handle<mirror::MethodType> callsite_type,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
@@ -780,7 +780,6 @@ inline static void DoFieldGetForInvokePolymorphic(Thread* self,
}
// Helper for setters in invoke-polymorphic.
-template <bool do_assignability_check>
inline bool DoFieldPutForInvokePolymorphic(Thread* self,
ShadowFrame& shadow_frame,
ObjPtr<mirror::Object>& obj,
@@ -788,30 +787,33 @@ inline bool DoFieldPutForInvokePolymorphic(Thread* self,
Primitive::Type field_type,
const JValue& value)
REQUIRES_SHARED(Locks::mutator_lock_) {
- static const bool kTransaction = false;
+ DCHECK(!Runtime::Current()->IsActiveTransaction());
+ static const bool kTransaction = false; // Not in a transaction.
+ static const bool kAssignabilityCheck = false; // No assignability check.
switch (field_type) {
case Primitive::kPrimBoolean:
- return DoFieldPutCommon<Primitive::kPrimBoolean, do_assignability_check, kTransaction>(
- self, shadow_frame, obj, field, value);
+ return
+ DoFieldPutCommon<Primitive::kPrimBoolean, kAssignabilityCheck, kTransaction>(
+ self, shadow_frame, obj, field, value);
case Primitive::kPrimByte:
- return DoFieldPutCommon<Primitive::kPrimByte, do_assignability_check, kTransaction>(
+ return DoFieldPutCommon<Primitive::kPrimByte, kAssignabilityCheck, kTransaction>(
self, shadow_frame, obj, field, value);
case Primitive::kPrimChar:
- return DoFieldPutCommon<Primitive::kPrimChar, do_assignability_check, kTransaction>(
+ return DoFieldPutCommon<Primitive::kPrimChar, kAssignabilityCheck, kTransaction>(
self, shadow_frame, obj, field, value);
case Primitive::kPrimShort:
- return DoFieldPutCommon<Primitive::kPrimShort, do_assignability_check, kTransaction>(
+ return DoFieldPutCommon<Primitive::kPrimShort, kAssignabilityCheck, kTransaction>(
self, shadow_frame, obj, field, value);
case Primitive::kPrimInt:
case Primitive::kPrimFloat:
- return DoFieldPutCommon<Primitive::kPrimInt, do_assignability_check, kTransaction>(
+ return DoFieldPutCommon<Primitive::kPrimInt, kAssignabilityCheck, kTransaction>(
self, shadow_frame, obj, field, value);
case Primitive::kPrimLong:
case Primitive::kPrimDouble:
- return DoFieldPutCommon<Primitive::kPrimLong, do_assignability_check, kTransaction>(
+ return DoFieldPutCommon<Primitive::kPrimLong, kAssignabilityCheck, kTransaction>(
self, shadow_frame, obj, field, value);
case Primitive::kPrimNot:
- return DoFieldPutCommon<Primitive::kPrimNot, do_assignability_check, kTransaction>(
+ return DoFieldPutCommon<Primitive::kPrimNot, kAssignabilityCheck, kTransaction>(
self, shadow_frame, obj, field, value);
case Primitive::kPrimVoid:
LOG(FATAL) << "Unreachable: " << field_type;
@@ -855,10 +857,10 @@ static JValue GetValueFromShadowFrame(const ShadowFrame& shadow_frame,
return field_value;
}
-template <bool is_range, bool do_conversions, bool do_assignability_check>
+template <bool is_range, bool do_conversions>
bool DoInvokePolymorphicFieldAccess(Thread* self,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> method_handle,
+ Handle<mirror::MethodHandle> method_handle,
Handle<mirror::MethodType> callsite_type,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
@@ -903,12 +905,7 @@ bool DoInvokePolymorphicFieldAccess(Thread* self,
return false;
}
ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
- return DoFieldPutForInvokePolymorphic<do_assignability_check>(self,
- shadow_frame,
- obj,
- field,
- field_type,
- value);
+ return DoFieldPutForInvokePolymorphic(self, shadow_frame, obj, field, field_type, value);
}
case mirror::MethodHandle::kStaticPut: {
ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
@@ -922,12 +919,7 @@ bool DoInvokePolymorphicFieldAccess(Thread* self,
DCHECK(self->IsExceptionPending());
return false;
}
- return DoFieldPutForInvokePolymorphic<do_assignability_check>(self,
- shadow_frame,
- obj,
- field,
- field_type,
- value);
+ return DoFieldPutForInvokePolymorphic(self, shadow_frame, obj, field, field_type, value);
}
default:
LOG(FATAL) << "Unreachable: " << handle_kind;
@@ -935,10 +927,10 @@ bool DoInvokePolymorphicFieldAccess(Thread* self,
}
}
-template <bool is_range, bool do_assignability_check>
+template <bool is_range>
static inline bool DoInvokePolymorphicNonExact(Thread* self,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> method_handle,
+ Handle<mirror::MethodHandle> method_handle,
Handle<mirror::MethodType> callsite_type,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
@@ -959,7 +951,7 @@ static inline bool DoInvokePolymorphicNonExact(Thread* self,
if (IsFieldAccess(handle_kind)) {
if (UNLIKELY(callsite_type->IsExactMatch(handle_type.Ptr()))) {
const bool do_convert = false;
- return DoInvokePolymorphicFieldAccess<is_range, do_convert, do_assignability_check>(
+ return DoInvokePolymorphicFieldAccess<is_range, do_convert>(
self,
shadow_frame,
method_handle,
@@ -969,7 +961,7 @@ static inline bool DoInvokePolymorphicNonExact(Thread* self,
result);
} else {
const bool do_convert = true;
- return DoInvokePolymorphicFieldAccess<is_range, do_convert, do_assignability_check>(
+ return DoInvokePolymorphicFieldAccess<is_range, do_convert>(
self,
shadow_frame,
method_handle,
@@ -999,10 +991,10 @@ static inline bool DoInvokePolymorphicNonExact(Thread* self,
}
}
-template <bool is_range, bool do_assignability_check>
+template <bool is_range>
bool DoInvokePolymorphicExact(Thread* self,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> method_handle,
+ Handle<mirror::MethodHandle> method_handle,
Handle<mirror::MethodType> callsite_type,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
@@ -1018,13 +1010,13 @@ bool DoInvokePolymorphicExact(Thread* self,
ThrowWrongMethodTypeException(nominal_type.Ptr(), callsite_type.Get());
return false;
}
- return DoInvokePolymorphicNonExact<is_range, do_assignability_check>(self,
- shadow_frame,
- method_handle,
- callsite_type,
- args,
- first_arg,
- result);
+ return DoInvokePolymorphicNonExact<is_range>(self,
+ shadow_frame,
+ method_handle,
+ callsite_type,
+ args,
+ first_arg,
+ result);
}
ObjPtr<mirror::MethodType> handle_type(method_handle->GetMethodType());
@@ -1036,7 +1028,7 @@ bool DoInvokePolymorphicExact(Thread* self,
const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
if (IsFieldAccess(handle_kind)) {
const bool do_convert = false;
- return DoInvokePolymorphicFieldAccess<is_range, do_convert, do_assignability_check>(
+ return DoInvokePolymorphicFieldAccess<is_range, do_convert>(
self,
shadow_frame,
method_handle,
@@ -1057,51 +1049,49 @@ bool DoInvokePolymorphicExact(Thread* self,
} // namespace
-template <bool is_range, bool do_assignability_check>
+template <bool is_range>
bool DoInvokePolymorphic(Thread* self,
ArtMethod* invoke_method,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> method_handle,
+ Handle<mirror::MethodHandle> method_handle,
Handle<mirror::MethodType> callsite_type,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
JValue* result)
REQUIRES_SHARED(Locks::mutator_lock_) {
if (IsMethodHandleInvokeExact(invoke_method)) {
- return DoInvokePolymorphicExact<is_range, do_assignability_check>(self,
- shadow_frame,
- method_handle,
- callsite_type,
- args,
- first_arg,
- result);
+ return DoInvokePolymorphicExact<is_range>(self,
+ shadow_frame,
+ method_handle,
+ callsite_type,
+ args,
+ first_arg,
+ result);
} else {
- return DoInvokePolymorphicNonExact<is_range, do_assignability_check>(self,
- shadow_frame,
- method_handle,
- callsite_type,
- args,
- first_arg,
- result);
+ return DoInvokePolymorphicNonExact<is_range>(self,
+ shadow_frame,
+ method_handle,
+ callsite_type,
+ args,
+ first_arg,
+ result);
}
}
-#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range, _do_assignability_check) \
-template REQUIRES_SHARED(Locks::mutator_lock_) \
-bool DoInvokePolymorphic<_is_range, _do_assignability_check>( \
- Thread* self, \
- ArtMethod* invoke_method, \
- ShadowFrame& shadow_frame, \
- Handle<mirror::MethodHandleImpl> method_handle, \
- Handle<mirror::MethodType> callsite_type, \
- const uint32_t (&args)[Instruction::kMaxVarArgRegs], \
- uint32_t first_arg, \
- JValue* result)
-
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true, true);
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true, false);
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false, true);
-EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false, false);
+#define EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(_is_range) \
+ template REQUIRES_SHARED(Locks::mutator_lock_) \
+ bool DoInvokePolymorphic<_is_range>( \
+ Thread* self, \
+ ArtMethod* invoke_method, \
+ ShadowFrame& shadow_frame, \
+ Handle<mirror::MethodHandle> method_handle, \
+ Handle<mirror::MethodType> callsite_type, \
+ const uint32_t (&args)[Instruction::kMaxVarArgRegs], \
+ uint32_t first_arg, \
+ JValue* result)
+
+EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(true);
+EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL(false);
#undef EXPLICIT_DO_INVOKE_POLYMORPHIC_TEMPLATE_DECL
} // namespace art
diff --git a/runtime/method_handles.h b/runtime/method_handles.h
index 734d7c7bf4..5bea0ab5cd 100644
--- a/runtime/method_handles.h
+++ b/runtime/method_handles.h
@@ -27,7 +27,7 @@
namespace art {
namespace mirror {
- class MethodHandleImpl;
+ class MethodHandle;
class MethodType;
} // mirror
@@ -202,11 +202,11 @@ class ShadowFrameSetter {
size_t arg_index_;
};
-template <bool is_range, bool do_assignability_check>
+template <bool is_range>
bool DoInvokePolymorphic(Thread* self,
ArtMethod* invoke_method,
ShadowFrame& shadow_frame,
- Handle<mirror::MethodHandleImpl> method_handle,
+ Handle<mirror::MethodHandle> method_handle,
Handle<mirror::MethodType> callsite_type,
const uint32_t (&args)[Instruction::kMaxVarArgRegs],
uint32_t first_arg,
diff --git a/runtime/mirror/array.cc b/runtime/mirror/array.cc
index cc548b9cc8..f283ec3e9d 100644
--- a/runtime/mirror/array.cc
+++ b/runtime/mirror/array.cc
@@ -52,7 +52,7 @@ static Array* RecursiveCreateMultiArray(Thread* self,
Array::Alloc<true>(self, array_class.Get(), array_length,
array_class->GetComponentSizeShift(),
Runtime::Current()->GetHeap()->GetCurrentAllocator())));
- if (UNLIKELY(new_array.Get() == nullptr)) {
+ if (UNLIKELY(new_array == nullptr)) {
CHECK(self->IsExceptionPending());
return nullptr;
}
@@ -98,14 +98,14 @@ Array* Array::CreateMultiArray(Thread* self, Handle<Class> element_class,
StackHandleScope<1> hs(self);
MutableHandle<mirror::Class> array_class(
hs.NewHandle(class_linker->FindArrayClass(self, &element_class_ptr)));
- if (UNLIKELY(array_class.Get() == nullptr)) {
+ if (UNLIKELY(array_class == nullptr)) {
CHECK(self->IsExceptionPending());
return nullptr;
}
for (int32_t i = 1; i < dimensions->GetLength(); ++i) {
ObjPtr<mirror::Class> array_class_ptr = array_class.Get();
array_class.Assign(class_linker->FindArrayClass(self, &array_class_ptr));
- if (UNLIKELY(array_class.Get() == nullptr)) {
+ if (UNLIKELY(array_class == nullptr)) {
CHECK(self->IsExceptionPending());
return nullptr;
}
diff --git a/runtime/mirror/call_site.cc b/runtime/mirror/call_site.cc
new file mode 100644
index 0000000000..eb613df4c6
--- /dev/null
+++ b/runtime/mirror/call_site.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "call_site.h"
+
+#include "class-inl.h"
+#include "gc_root-inl.h"
+
+namespace art {
+namespace mirror {
+
+GcRoot<mirror::Class> CallSite::static_class_;
+
+mirror::CallSite* CallSite::Create(Thread* const self, Handle<MethodHandle> target) {
+ StackHandleScope<1> hs(self);
+ Handle<mirror::CallSite> cs(
+ hs.NewHandle(ObjPtr<CallSite>::DownCast(StaticClass()->AllocObject(self))));
+ CHECK(!Runtime::Current()->IsActiveTransaction());
+ cs->SetFieldObject<false>(TargetOffset(), target.Get());
+ return cs.Get();
+}
+
+void CallSite::SetClass(Class* klass) {
+ CHECK(static_class_.IsNull()) << static_class_.Read() << " " << klass;
+ CHECK(klass != nullptr);
+ static_class_ = GcRoot<Class>(klass);
+}
+
+void CallSite::ResetClass() {
+ CHECK(!static_class_.IsNull());
+ static_class_ = GcRoot<Class>(nullptr);
+}
+
+void CallSite::VisitRoots(RootVisitor* visitor) {
+ static_class_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
+}
+
+} // namespace mirror
+} // namespace art
diff --git a/runtime/mirror/call_site.h b/runtime/mirror/call_site.h
new file mode 100644
index 0000000000..db244a5442
--- /dev/null
+++ b/runtime/mirror/call_site.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_MIRROR_CALL_SITE_H_
+#define ART_RUNTIME_MIRROR_CALL_SITE_H_
+
+#include "mirror/method_handle_impl.h"
+#include "utils.h"
+
+namespace art {
+
+struct CallSiteOffsets;
+
+namespace mirror {
+
+// C++ mirror of java.lang.invoke.CallSite
+class MANAGED CallSite : public Object {
+ public:
+ static mirror::CallSite* Create(Thread* const self,
+ Handle<MethodHandle> method_handle)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
+
+ static mirror::Class* StaticClass() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return static_class_.Read();
+ }
+
+ MethodHandle* GetTarget() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetFieldObject<MethodHandle>(TargetOffset());
+ }
+
+ static void SetClass(Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);
+ static void ResetClass() REQUIRES_SHARED(Locks::mutator_lock_);
+ static void VisitRoots(RootVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+ static inline MemberOffset TargetOffset() {
+ return MemberOffset(OFFSETOF_MEMBER(CallSite, target_));
+ }
+
+ HeapReference<mirror::MethodHandle> target_;
+
+ static GcRoot<mirror::Class> static_class_; // java.lang.invoke.CallSite.class
+
+ friend struct art::CallSiteOffsets; // for verifying offset information
+ DISALLOW_IMPLICIT_CONSTRUCTORS(CallSite);
+};
+
+} // namespace mirror
+} // namespace art
+
+#endif // ART_RUNTIME_MIRROR_CALL_SITE_H_
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 1b8f3f83e7..9a9a5d8398 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -81,7 +81,7 @@ ClassExt* Class::EnsureExtDataPresent(Thread* self) {
self->ClearException();
// Allocate the ClassExt
Handle<ClassExt> new_ext(hs.NewHandle(ClassExt::Alloc(self)));
- if (new_ext.Get() == nullptr) {
+ if (new_ext == nullptr) {
// OOM allocating the classExt.
// TODO Should we restore the suppressed exception?
self->AssertPendingOOMException();
@@ -103,7 +103,7 @@ ClassExt* Class::EnsureExtDataPresent(Thread* self) {
DCHECK(!set || h_this->GetExtData() == new_ext.Get());
CHECK(!ret.IsNull());
// Restore the exception if there was one.
- if (throwable.Get() != nullptr) {
+ if (throwable != nullptr) {
self->SetException(throwable.Get());
}
return ret.Ptr();
@@ -269,10 +269,10 @@ void Class::DumpClass(std::ostream& os, int flags) {
os << "----- " << (IsInterface() ? "interface" : "class") << " "
<< "'" << GetDescriptor(&temp) << "' cl=" << GetClassLoader() << " -----\n",
os << " objectSize=" << SizeOf() << " "
- << "(" << (h_super.Get() != nullptr ? h_super->SizeOf() : -1) << " from super)\n",
+ << "(" << (h_super != nullptr ? h_super->SizeOf() : -1) << " from super)\n",
os << StringPrintf(" access=0x%04x.%04x\n",
GetAccessFlags() >> 16, GetAccessFlags() & kAccJavaFlagsMask);
- if (h_super.Get() != nullptr) {
+ if (h_super != nullptr) {
os << " super='" << h_super->PrettyClass() << "' (cl=" << h_super->GetClassLoader()
<< ")\n";
}
@@ -297,7 +297,7 @@ void Class::DumpClass(std::ostream& os, int flags) {
} else {
// After this point, this may have moved due to GetDirectInterface.
os << " vtable (" << h_this->NumVirtualMethods() << " entries, "
- << (h_super.Get() != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n";
+ << (h_super != nullptr ? h_super->NumVirtualMethods() : 0) << " in super):\n";
for (size_t i = 0; i < NumVirtualMethods(); ++i) {
os << StringPrintf(" %2zd: %s\n", i, ArtMethod::PrettyMethod(
h_this->GetVirtualMethodDuringLinking(i, image_pointer_size)).c_str());
@@ -971,7 +971,7 @@ ObjPtr<Class> Class::ResolveDirectInterface(Thread* self, Handle<Class> klass, u
}
ObjPtr<Class> Class::GetCommonSuperClass(Handle<Class> klass) {
- DCHECK(klass.Get() != nullptr);
+ DCHECK(klass != nullptr);
DCHECK(!klass->IsInterface());
DCHECK(!IsInterface());
ObjPtr<Class> common_super_class = this;
@@ -1165,7 +1165,7 @@ ObjPtr<Method> Class::GetDeclaredMethodInternal(
constexpr uint32_t kSkipModifiers = kAccMiranda | kAccSynthetic;
StackHandleScope<3> hs(self);
auto h_method_name = hs.NewHandle(name);
- if (UNLIKELY(h_method_name.Get() == nullptr)) {
+ if (UNLIKELY(h_method_name == nullptr)) {
ThrowNullPointerException("name == null");
return nullptr;
}
diff --git a/runtime/mirror/class_ext.cc b/runtime/mirror/class_ext.cc
index efd949e031..7270079a8f 100644
--- a/runtime/mirror/class_ext.cc
+++ b/runtime/mirror/class_ext.cc
@@ -58,8 +58,8 @@ bool ClassExt::ExtendObsoleteArrays(Thread* self, uint32_t increase) {
Handle<ObjectArray<DexCache>> old_dex_caches(hs.NewHandle(h_this->GetObsoleteDexCaches()));
ClassLinker* cl = Runtime::Current()->GetClassLinker();
size_t new_len;
- if (old_methods.Get() == nullptr) {
- CHECK(old_dex_caches.Get() == nullptr);
+ if (old_methods == nullptr) {
+ CHECK(old_dex_caches == nullptr);
new_len = increase;
} else {
CHECK_EQ(old_methods->GetLength(), old_dex_caches->GetLength());
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index a59bb7b880..973c8ed07d 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -26,6 +26,7 @@
#include "base/logging.h"
#include "gc_root.h"
#include "mirror/class.h"
+#include "mirror/call_site.h"
#include "mirror/method_type.h"
#include "runtime.h"
#include "obj_ptr.h"
@@ -106,6 +107,35 @@ inline void DexCache::SetResolvedMethodType(uint32_t proto_idx, MethodType* reso
Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}
+inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
+ DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
+ DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
+ GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
+ Atomic<GcRoot<mirror::CallSite>>& ref =
+ reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
+ return ref.LoadSequentiallyConsistent().Read();
+}
+
+inline CallSite* DexCache::SetResolvedCallSite(uint32_t call_site_idx, CallSite* call_site) {
+ DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
+ DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
+
+ GcRoot<mirror::CallSite> null_call_site(nullptr);
+ GcRoot<mirror::CallSite> candidate(call_site);
+ GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
+
+ // The first assignment for a given call site wins.
+ Atomic<GcRoot<mirror::CallSite>>& ref =
+ reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
+ if (ref.CompareExchangeStrongSequentiallyConsistent(null_call_site, candidate)) {
+ // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
+ Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
+ return call_site;
+ } else {
+ return target.Read();
+ }
+}
+
inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, PointerSize ptr_size) {
DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
DCHECK_LT(field_idx, NumResolvedFields()); // NOTE: Unchecked, i.e. not throwing AIOOB.
@@ -208,6 +238,11 @@ inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visito
VisitDexCachePairs<mirror::MethodType, kReadBarrierOption, Visitor>(
GetResolvedMethodTypes(), NumResolvedMethodTypes(), visitor);
+
+ GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites();
+ for (size_t i = 0, num_call_sites = NumResolvedCallSites(); i != num_call_sites; ++i) {
+ visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
+ }
}
}
@@ -246,6 +281,17 @@ inline void DexCache::FixupResolvedMethodTypes(mirror::MethodTypeDexCacheType* d
}
}
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
+inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest,
+ const Visitor& visitor) {
+ GcRoot<mirror::CallSite>* src = GetResolvedCallSites();
+ for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) {
+ mirror::CallSite* source = src[i].Read<kReadBarrierOption>();
+ mirror::CallSite* new_source = visitor(source);
+ dest[i] = GcRoot<mirror::CallSite>(new_source);
+ }
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index 741cf3bb47..0f6acab7e1 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -90,6 +90,10 @@ void DexCache::InitializeDexCache(Thread* self,
raw_arrays + layout.MethodTypesOffset());
}
+ GcRoot<mirror::CallSite>* call_sites = (dex_file->NumCallSiteIds() == 0)
+ ? nullptr
+ : reinterpret_cast<GcRoot<mirror::CallSite>*>(raw_arrays + layout.CallSitesOffset());
+
DCHECK_ALIGNED(raw_arrays, alignof(mirror::StringDexCacheType)) <<
"Expected raw_arrays to align to StringDexCacheType.";
DCHECK_ALIGNED(layout.StringsOffset(), alignof(mirror::StringDexCacheType)) <<
@@ -117,6 +121,9 @@ void DexCache::InitializeDexCache(Thread* self,
CHECK_EQ(method_types[i].load(std::memory_order_relaxed).index, 0u);
CHECK(method_types[i].load(std::memory_order_relaxed).object.IsNull());
}
+ for (size_t i = 0; i < dex_file->NumCallSiteIds(); ++i) {
+ CHECK(call_sites[i].IsNull());
+ }
}
if (strings != nullptr) {
mirror::StringDexCachePair::Initialize(strings);
@@ -136,6 +143,8 @@ void DexCache::InitializeDexCache(Thread* self,
dex_file->NumFieldIds(),
method_types,
num_method_types,
+ call_sites,
+ dex_file->NumCallSiteIds(),
image_pointer_size);
}
@@ -151,6 +160,8 @@ void DexCache::Init(const DexFile* dex_file,
uint32_t num_resolved_fields,
MethodTypeDexCacheType* resolved_method_types,
uint32_t num_resolved_method_types,
+ GcRoot<CallSite>* resolved_call_sites,
+ uint32_t num_resolved_call_sites,
PointerSize pointer_size) {
CHECK(dex_file != nullptr);
CHECK(location != nullptr);
@@ -159,6 +170,7 @@ void DexCache::Init(const DexFile* dex_file,
CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr);
CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr);
CHECK_EQ(num_resolved_method_types != 0u, resolved_method_types != nullptr);
+ CHECK_EQ(num_resolved_call_sites != 0u, resolved_call_sites != nullptr);
SetDexFile(dex_file);
SetLocation(location);
@@ -167,11 +179,13 @@ void DexCache::Init(const DexFile* dex_file,
SetResolvedMethods(resolved_methods);
SetResolvedFields(resolved_fields);
SetResolvedMethodTypes(resolved_method_types);
+ SetResolvedCallSites(resolved_call_sites);
SetField32<false>(NumStringsOffset(), num_strings);
SetField32<false>(NumResolvedTypesOffset(), num_resolved_types);
SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods);
SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields);
SetField32<false>(NumResolvedMethodTypesOffset(), num_resolved_method_types);
+ SetField32<false>(NumResolvedCallSitesOffset(), num_resolved_call_sites);
Runtime* const runtime = Runtime::Current();
if (runtime->HasResolutionMethod()) {
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 6f88cc5df4..10bb5aa01c 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -36,6 +36,7 @@ class Thread;
namespace mirror {
+class CallSite;
class MethodType;
class String;
@@ -163,6 +164,10 @@ class MANAGED DexCache FINAL : public Object {
void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
String* GetLocation() REQUIRES_SHARED(Locks::mutator_lock_) {
return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
}
@@ -191,6 +196,10 @@ class MANAGED DexCache FINAL : public Object {
return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
}
+ static MemberOffset ResolvedCallSitesOffset() {
+ return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
+ }
+
static MemberOffset NumStringsOffset() {
return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
}
@@ -211,6 +220,10 @@ class MANAGED DexCache FINAL : public Object {
return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
}
+ static MemberOffset NumResolvedCallSitesOffset() {
+ return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
+ }
+
mirror::String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -244,7 +257,18 @@ class MANAGED DexCache FINAL : public Object {
MethodType* GetResolvedMethodType(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
- void SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved) REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+
+ // Attempts to bind |call_site_idx| to the call site |resolved|. The
+ // caller must use the return value in place of |resolved|. This is
+ // because multiple threads can invoke the bootstrap method each
+ // producing a call site, but the method handle invocation on the
+ // call site must be on a common agreed value.
+ CallSite* SetResolvedCallSite(uint32_t call_site_idx, CallSite* resolved) WARN_UNUSED
+ REQUIRES_SHARED(Locks::mutator_lock_);
StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
return GetFieldPtr64<StringDexCacheType*>(StringsOffset());
@@ -295,6 +319,18 @@ class MANAGED DexCache FINAL : public Object {
SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
}
+ GcRoot<CallSite>* GetResolvedCallSites()
+ ALWAYS_INLINE
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetFieldPtr<GcRoot<CallSite>*>(ResolvedCallSitesOffset());
+ }
+
+ void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
+ ALWAYS_INLINE
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
+ }
+
size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
return GetField32(NumStringsOffset());
}
@@ -315,6 +351,10 @@ class MANAGED DexCache FINAL : public Object {
return GetField32(NumResolvedMethodTypesOffset());
}
+ size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetField32(NumResolvedCallSitesOffset());
+ }
+
const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
}
@@ -346,8 +386,10 @@ class MANAGED DexCache FINAL : public Object {
uint32_t num_resolved_methods,
ArtField** resolved_fields,
uint32_t num_resolved_fields,
- MethodTypeDexCacheType* resolved_methodtypes,
- uint32_t num_resolved_methodtypes,
+ MethodTypeDexCacheType* resolved_method_types,
+ uint32_t num_resolved_method_types,
+ GcRoot<CallSite>* resolved_call_sites,
+ uint32_t num_resolved_call_sites,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -362,6 +404,8 @@ class MANAGED DexCache FINAL : public Object {
HeapReference<Object> dex_;
HeapReference<String> location_;
uint64_t dex_file_; // const DexFile*
+ uint64_t resolved_call_sites_; // GcRoot<CallSite>* array with num_resolved_call_sites_
+ // elements.
uint64_t resolved_fields_; // ArtField*, array with num_resolved_fields_ elements.
uint64_t resolved_method_types_; // std::atomic<MethodTypeDexCachePair>* array with
// num_resolved_method_types_ elements.
@@ -370,6 +414,7 @@ class MANAGED DexCache FINAL : public Object {
uint64_t strings_; // std::atomic<StringDexCachePair>*, array with num_strings_
// elements.
+ uint32_t num_resolved_call_sites_; // Number of elements in the call_sites_ array.
uint32_t num_resolved_fields_; // Number of elements in the resolved_fields_ array.
uint32_t num_resolved_method_types_; // Number of elements in the resolved_method_types_ array.
uint32_t num_resolved_methods_; // Number of elements in the resolved_methods_ array.
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index 8f978e122c..5a2ab7151c 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -47,7 +47,7 @@ TEST_F(DexCacheTest, Open) {
soa.Self(),
*java_lang_dex_file_,
Runtime::Current()->GetLinearAlloc())));
- ASSERT_TRUE(dex_cache.Get() != nullptr);
+ ASSERT_TRUE(dex_cache != nullptr);
EXPECT_TRUE(dex_cache->StaticStringSize() == dex_cache->NumStrings()
|| java_lang_dex_file_->NumStringIds() == dex_cache->NumStrings());
@@ -95,10 +95,10 @@ TEST_F(DexCacheTest, TestResolvedFieldAccess) {
soa.Decode<mirror::ClassLoader>(jclass_loader)));
Handle<mirror::Class> klass1 =
hs.NewHandle(class_linker_->FindClass(soa.Self(), "Lpackage1/Package1;", class_loader));
- ASSERT_TRUE(klass1.Get() != nullptr);
+ ASSERT_TRUE(klass1 != nullptr);
Handle<mirror::Class> klass2 =
hs.NewHandle(class_linker_->FindClass(soa.Self(), "Lpackage2/Package2;", class_loader));
- ASSERT_TRUE(klass2.Get() != nullptr);
+ ASSERT_TRUE(klass2 != nullptr);
EXPECT_EQ(klass1->GetDexCache(), klass2->GetDexCache());
EXPECT_NE(klass1->NumStaticFields(), 0u);
diff --git a/runtime/mirror/emulated_stack_frame.cc b/runtime/mirror/emulated_stack_frame.cc
index 978cc32320..be0eac05c9 100644
--- a/runtime/mirror/emulated_stack_frame.cc
+++ b/runtime/mirror/emulated_stack_frame.cc
@@ -173,13 +173,13 @@ mirror::EmulatedStackFrame* EmulatedStackFrame::CreateFromShadowFrameAndArgs(
Handle<mirror::ObjectArray<mirror::Object>> references(hs.NewHandle(
mirror::ObjectArray<mirror::Object>::Alloc(self, array_class, refs_size)));
- if (references.Get() == nullptr) {
+ if (references == nullptr) {
DCHECK(self->IsExceptionPending());
return nullptr;
}
Handle<ByteArray> stack_frame(hs.NewHandle(ByteArray::Alloc(self, frame_size)));
- if (stack_frame.Get() == nullptr) {
+ if (stack_frame == nullptr) {
DCHECK(self->IsExceptionPending());
return nullptr;
}
diff --git a/runtime/mirror/field-inl.h b/runtime/mirror/field-inl.h
index c03f20a991..2496989337 100644
--- a/runtime/mirror/field-inl.h
+++ b/runtime/mirror/field-inl.h
@@ -33,7 +33,7 @@ inline mirror::Field* Field::CreateFromArtField(Thread* self, ArtField* field, b
// Try to resolve type before allocating since this is a thread suspension point.
Handle<mirror::Class> type = hs.NewHandle(field->GetType<true>());
- if (type.Get() == nullptr) {
+ if (type == nullptr) {
if (force_resolve) {
if (kIsDebugBuild) {
self->AssertPendingException();
@@ -49,7 +49,7 @@ inline mirror::Field* Field::CreateFromArtField(Thread* self, ArtField* field, b
}
}
auto ret = hs.NewHandle(ObjPtr<Field>::DownCast(StaticClass()->AllocObject(self)));
- if (UNLIKELY(ret.Get() == nullptr)) {
+ if (UNLIKELY(ret == nullptr)) {
self->AssertPendingOOMException();
return nullptr;
}
diff --git a/runtime/mirror/method_handle_impl.cc b/runtime/mirror/method_handle_impl.cc
index 4f1c448b56..fa4d25a031 100644
--- a/runtime/mirror/method_handle_impl.cc
+++ b/runtime/mirror/method_handle_impl.cc
@@ -28,6 +28,18 @@ mirror::Class* MethodHandle::StaticClass() {
return klass;
}
+void MethodHandle::Initialize(uintptr_t art_field_or_method,
+ Kind kind,
+ Handle<MethodType> method_type)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ CHECK(!Runtime::Current()->IsActiveTransaction());
+ SetFieldObject<false>(CachedSpreadInvokerOffset(), nullptr);
+ SetFieldObject<false>(NominalTypeOffset(), nullptr);
+ SetFieldObject<false>(MethodTypeOffset(), method_type.Get());
+ SetField32<false>(HandleKindOffset(), static_cast<uint32_t>(kind));
+ SetField64<false>(ArtFieldOrMethodOffset(), art_field_or_method);
+}
+
GcRoot<mirror::Class> MethodHandleImpl::static_class_;
void MethodHandleImpl::SetClass(Class* klass) {
@@ -45,5 +57,17 @@ void MethodHandleImpl::VisitRoots(RootVisitor* visitor) {
static_class_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
}
+mirror::MethodHandleImpl* MethodHandleImpl::Create(Thread* const self,
+ uintptr_t art_field_or_method,
+ MethodHandle::Kind kind,
+ Handle<MethodType> method_type)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_) {
+ StackHandleScope<1> hs(self);
+ Handle<mirror::MethodHandleImpl> mh(
+ hs.NewHandle(ObjPtr<MethodHandleImpl>::DownCast(StaticClass()->AllocObject(self))));
+ mh->Initialize(art_field_or_method, kind, method_type);
+ return mh.Get();
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/method_handle_impl.h b/runtime/mirror/method_handle_impl.h
index 53d267b52c..9938af8abf 100644
--- a/runtime/mirror/method_handle_impl.h
+++ b/runtime/mirror/method_handle_impl.h
@@ -17,10 +17,11 @@
#ifndef ART_RUNTIME_MIRROR_METHOD_HANDLE_IMPL_H_
#define ART_RUNTIME_MIRROR_METHOD_HANDLE_IMPL_H_
+#include "art_field.h"
+#include "art_method.h"
#include "class.h"
#include "gc_root.h"
#include "object-inl.h"
-#include "method_handles.h"
#include "method_type.h"
namespace art {
@@ -82,10 +83,19 @@ class MANAGED MethodHandle : public Object {
GetField64(OFFSET_OF_OBJECT_MEMBER(MethodHandle, art_field_or_method_)));
}
+ ObjPtr<mirror::Class> GetTargetClass() REQUIRES_SHARED(Locks::mutator_lock_) {
+ Kind kind = GetHandleKind();
+ return (kind <= kLastValidKind) ?
+ GetTargetMethod()->GetDeclaringClass() : GetTargetField()->GetDeclaringClass();
+ }
+
static mirror::Class* StaticClass() REQUIRES_SHARED(Locks::mutator_lock_);
+ protected:
+ void Initialize(uintptr_t art_field_or_method, Kind kind, Handle<MethodType> method_type)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
private:
- // NOTE: cached_spread_invoker_ isn't used by the runtime.
HeapReference<mirror::MethodHandle> cached_spread_invoker_;
HeapReference<mirror::MethodType> nominal_type_;
HeapReference<mirror::MethodType> method_type_;
@@ -93,6 +103,9 @@ class MANAGED MethodHandle : public Object {
uint64_t art_field_or_method_;
private:
+ static MemberOffset CachedSpreadInvokerOffset() {
+ return MemberOffset(OFFSETOF_MEMBER(MethodHandle, cached_spread_invoker_));
+ }
static MemberOffset NominalTypeOffset() {
return MemberOffset(OFFSETOF_MEMBER(MethodHandle, nominal_type_));
}
@@ -113,6 +126,12 @@ class MANAGED MethodHandle : public Object {
// C++ mirror of java.lang.invoke.MethodHandleImpl
class MANAGED MethodHandleImpl : public MethodHandle {
public:
+ static mirror::MethodHandleImpl* Create(Thread* const self,
+ uintptr_t art_field_or_method,
+ MethodHandle::Kind kind,
+ Handle<MethodType> method_type)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
+
static mirror::Class* StaticClass() REQUIRES_SHARED(Locks::mutator_lock_) {
return static_class_.Read();
}
diff --git a/runtime/mirror/method_handles_lookup.cc b/runtime/mirror/method_handles_lookup.cc
new file mode 100644
index 0000000000..c758e54dd4
--- /dev/null
+++ b/runtime/mirror/method_handles_lookup.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "method_handles_lookup.h"
+
+#include "class.h"
+#include "gc_root-inl.h"
+#include "object-inl.h"
+#include "handle_scope.h"
+#include "modifiers.h"
+
+namespace art {
+namespace mirror {
+
+GcRoot<mirror::Class> MethodHandlesLookup::static_class_;
+
+void MethodHandlesLookup::SetClass(Class* klass) {
+ CHECK(static_class_.IsNull()) << static_class_.Read() << " " << klass;
+ CHECK(klass != nullptr);
+ static_class_ = GcRoot<Class>(klass);
+}
+
+void MethodHandlesLookup::ResetClass() {
+ CHECK(!static_class_.IsNull());
+ static_class_ = GcRoot<Class>(nullptr);
+}
+
+void MethodHandlesLookup::VisitRoots(RootVisitor* visitor) {
+ static_class_.VisitRootIfNonNull(visitor, RootInfo(kRootStickyClass));
+}
+
+MethodHandlesLookup* MethodHandlesLookup::Create(Thread* const self, Handle<Class> lookup_class)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_) {
+ static constexpr uint32_t kAllModes = kAccPublic | kAccPrivate | kAccProtected | kAccStatic;
+
+ StackHandleScope<1> hs(self);
+ Handle<MethodHandlesLookup> mhl(
+ hs.NewHandle(ObjPtr<MethodHandlesLookup>::DownCast(StaticClass()->AllocObject(self))));
+ mhl->SetFieldObject<false>(LookupClassOffset(), lookup_class.Get());
+ mhl->SetField32<false>(AllowedModesOffset(), kAllModes);
+ return mhl.Get();
+}
+
+} // namespace mirror
+} // namespace art
diff --git a/runtime/mirror/method_handles_lookup.h b/runtime/mirror/method_handles_lookup.h
new file mode 100644
index 0000000000..63eb428f94
--- /dev/null
+++ b/runtime/mirror/method_handles_lookup.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_MIRROR_METHOD_HANDLES_LOOKUP_H_
+#define ART_RUNTIME_MIRROR_METHOD_HANDLES_LOOKUP_H_
+
+#include "obj_ptr.h"
+#include "gc_root.h"
+#include "object.h"
+#include "handle.h"
+#include "utils.h"
+
+namespace art {
+
+struct MethodHandlesLookupOffsets;
+class RootVisitor;
+
+namespace mirror {
+
+// C++ mirror of java.lang.invoke.MethodHandles.Lookup
+class MANAGED MethodHandlesLookup : public Object {
+ public:
+ static mirror::MethodHandlesLookup* Create(Thread* const self,
+ Handle<Class> lookup_class)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
+
+ static mirror::Class* StaticClass() REQUIRES_SHARED(Locks::mutator_lock_) {
+ return static_class_.Read();
+ }
+
+ static void SetClass(Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);
+ static void ResetClass() REQUIRES_SHARED(Locks::mutator_lock_);
+ static void VisitRoots(RootVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+ static MemberOffset AllowedModesOffset() {
+ return MemberOffset(OFFSETOF_MEMBER(MethodHandlesLookup, allowed_modes_));
+ }
+
+ static MemberOffset LookupClassOffset() {
+ return MemberOffset(OFFSETOF_MEMBER(MethodHandlesLookup, lookup_class_));
+ }
+
+ HeapReference<mirror::Class> lookup_class_;
+
+ int32_t allowed_modes_;
+
+ static GcRoot<mirror::Class> static_class_; // java.lang.invoke.MethodHandles.Lookup.class
+
+ friend struct art::MethodHandlesLookupOffsets; // for verifying offset information
+ DISALLOW_IMPLICIT_CONSTRUCTORS(MethodHandlesLookup);
+};
+
+} // namespace mirror
+} // namespace art
+
+#endif // ART_RUNTIME_MIRROR_METHOD_HANDLES_LOOKUP_H_
diff --git a/runtime/mirror/method_type.cc b/runtime/mirror/method_type.cc
index 5d77a16e7d..4b8dfacec6 100644
--- a/runtime/mirror/method_type.cc
+++ b/runtime/mirror/method_type.cc
@@ -44,6 +44,22 @@ mirror::MethodType* MethodType::Create(Thread* const self,
return mt.Get();
}
+size_t MethodType::NumberOfVRegs() REQUIRES_SHARED(Locks::mutator_lock_) {
+ mirror::ObjectArray<Class>* const p_types = GetPTypes();
+ const int32_t p_types_length = p_types->GetLength();
+
+ // Initialize |num_vregs| with number of parameters and only increment it for
+ // types requiring a second vreg.
+ size_t num_vregs = static_cast<size_t>(p_types_length);
+ for (int32_t i = 0; i < p_types_length; ++i) {
+ mirror::Class* klass = p_types->GetWithoutChecks(i);
+ if (klass->IsPrimitiveLong() || klass->IsPrimitiveDouble()) {
+ ++num_vregs;
+ }
+ }
+ return num_vregs;
+}
+
bool MethodType::IsExactMatch(mirror::MethodType* target) REQUIRES_SHARED(Locks::mutator_lock_) {
mirror::ObjectArray<Class>* const p_types = GetPTypes();
const int32_t params_length = p_types->GetLength();
diff --git a/runtime/mirror/method_type.h b/runtime/mirror/method_type.h
index 9a98143144..374bbe5df3 100644
--- a/runtime/mirror/method_type.h
+++ b/runtime/mirror/method_type.h
@@ -44,6 +44,10 @@ class MANAGED MethodType : public Object {
return GetFieldObject<ObjectArray<Class>>(OFFSET_OF_OBJECT_MEMBER(MethodType, p_types_));
}
+ // Number of virtual registers required to hold the parameters for
+ // this method type.
+ size_t NumberOfVRegs() REQUIRES_SHARED(Locks::mutator_lock_);
+
Class* GetRType() REQUIRES_SHARED(Locks::mutator_lock_) {
return GetFieldObject<Class>(OFFSET_OF_OBJECT_MEMBER(MethodType, r_type_));
}
diff --git a/runtime/mirror/method_type_test.cc b/runtime/mirror/method_type_test.cc
index 637bafd75e..41231ef617 100644
--- a/runtime/mirror/method_type_test.cc
+++ b/runtime/mirror/method_type_test.cc
@@ -51,7 +51,7 @@ static mirror::MethodType* CreateMethodType(const std::string& return_type,
Handle<mirror::Class> return_clazz = hs.NewHandle(class_linker->FindClass(
soa.Self(), FullyQualifiedType(return_type).c_str(), boot_class_loader));
- CHECK(return_clazz.Get() != nullptr);
+ CHECK(return_clazz != nullptr);
ObjPtr<mirror::Class> class_type = mirror::Class::GetJavaLangClass();
mirror::Class* class_array_type = class_linker->FindArrayClass(self, &class_type);
diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc
index 6a4ec9dca7..e761e4db7a 100644
--- a/runtime/mirror/object_test.cc
+++ b/runtime/mirror/object_test.cc
@@ -530,8 +530,8 @@ TEST_F(ObjectTest, InstanceOf) {
Handle<Object> x(hs.NewHandle(X->AllocObject(soa.Self())));
Handle<Object> y(hs.NewHandle(Y->AllocObject(soa.Self())));
- ASSERT_TRUE(x.Get() != nullptr);
- ASSERT_TRUE(y.Get() != nullptr);
+ ASSERT_TRUE(x != nullptr);
+ ASSERT_TRUE(y != nullptr);
EXPECT_TRUE(x->InstanceOf(X));
EXPECT_FALSE(x->InstanceOf(Y));
@@ -650,7 +650,7 @@ TEST_F(ObjectTest, FindInstanceField) {
ScopedObjectAccess soa(Thread::Current());
StackHandleScope<1> hs(soa.Self());
Handle<String> s(hs.NewHandle(String::AllocFromModifiedUtf8(soa.Self(), "ABC")));
- ASSERT_TRUE(s.Get() != nullptr);
+ ASSERT_TRUE(s != nullptr);
Class* c = s->GetClass();
ASSERT_TRUE(c != nullptr);
@@ -684,9 +684,9 @@ TEST_F(ObjectTest, FindStaticField) {
ScopedObjectAccess soa(Thread::Current());
StackHandleScope<4> hs(soa.Self());
Handle<String> s(hs.NewHandle(String::AllocFromModifiedUtf8(soa.Self(), "ABC")));
- ASSERT_TRUE(s.Get() != nullptr);
+ ASSERT_TRUE(s != nullptr);
Handle<Class> c(hs.NewHandle(s->GetClass()));
- ASSERT_TRUE(c.Get() != nullptr);
+ ASSERT_TRUE(c != nullptr);
// Wrong type.
EXPECT_TRUE(c->FindDeclaredStaticField("CASE_INSENSITIVE_ORDER", "I") == nullptr);
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index 9b8445dc9e..c2407d7772 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -308,7 +308,7 @@ inline int32_t String::GetHashCode() {
}
template<typename MemoryType>
-bool String::AllASCII(const MemoryType* const chars, const int length) {
+inline bool String::AllASCII(const MemoryType* chars, const int length) {
static_assert(std::is_unsigned<MemoryType>::value, "Expecting unsigned MemoryType");
for (int i = 0; i < length; ++i) {
// Valid ASCII characters are in range 1..0x7f. Zero is not considered ASCII
@@ -320,6 +320,13 @@ bool String::AllASCII(const MemoryType* const chars, const int length) {
return true;
}
+inline bool String::DexFileStringAllASCII(const char* chars, const int length) {
+ // For strings from the dex file we just need to check that
+ // the terminating character is at the right position.
+ DCHECK_EQ(AllASCII(reinterpret_cast<const uint8_t*>(chars), length), chars[length] == 0);
+ return chars[length] == 0;
+}
+
} // namespace mirror
} // namespace art
diff --git a/runtime/mirror/string.h b/runtime/mirror/string.h
index 409c6c2896..38f6dd4b6f 100644
--- a/runtime/mirror/string.h
+++ b/runtime/mirror/string.h
@@ -184,7 +184,9 @@ class MANAGED String FINAL : public Object {
bool IsValueNull() REQUIRES_SHARED(Locks::mutator_lock_);
template<typename MemoryType>
- static bool AllASCII(const MemoryType* const chars, const int length);
+ static bool AllASCII(const MemoryType* chars, const int length);
+
+ static bool DexFileStringAllASCII(const char* chars, const int length);
ALWAYS_INLINE static bool IsCompressed(int32_t count) {
return GetCompressionFlagFromCount(count) == StringCompressionFlag::kCompressed;
diff --git a/runtime/monitor.cc b/runtime/monitor.cc
index a32003e81b..f3cb0df80e 100644
--- a/runtime/monitor.cc
+++ b/runtime/monitor.cc
@@ -1380,7 +1380,7 @@ void MonitorList::Add(Monitor* m) {
while (!kUseReadBarrier && UNLIKELY(!allow_new_monitors_)) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
// presence of threads blocking for weak ref access.
- self->CheckEmptyCheckpoint();
+ self->CheckEmptyCheckpointFromWeakRefAccess(&monitor_list_lock_);
monitor_add_condition_.WaitHoldingLocks(self);
}
list_.push_front(m);
diff --git a/runtime/monitor_test.cc b/runtime/monitor_test.cc
index 4fbfe4781c..27ce149342 100644
--- a/runtime/monitor_test.cc
+++ b/runtime/monitor_test.cc
@@ -77,7 +77,7 @@ static void FillHeap(Thread* self, ClassLinker* class_linker,
while (length > 10) {
MutableHandle<mirror::Object> h((*hsp)->NewHandle<mirror::Object>(
mirror::ObjectArray<mirror::Object>::Alloc(self, ca.Get(), length / 4)));
- if (self->IsExceptionPending() || h.Get() == nullptr) {
+ if (self->IsExceptionPending() || h == nullptr) {
self->ClearException();
// Try a smaller length
@@ -95,7 +95,7 @@ static void FillHeap(Thread* self, ClassLinker* class_linker,
// Allocate simple objects till it fails.
while (!self->IsExceptionPending()) {
MutableHandle<mirror::Object> h = (*hsp)->NewHandle<mirror::Object>(c->AllocObject(self));
- if (!self->IsExceptionPending() && h.Get() != nullptr) {
+ if (!self->IsExceptionPending() && h != nullptr) {
handles->push_back(h);
}
}
diff --git a/runtime/native/dalvik_system_VMRuntime.cc b/runtime/native/dalvik_system_VMRuntime.cc
index 24308d9e81..6bfccdc8fb 100644
--- a/runtime/native/dalvik_system_VMRuntime.cc
+++ b/runtime/native/dalvik_system_VMRuntime.cc
@@ -350,7 +350,7 @@ static void PreloadDexCachesResolveField(Handle<mirror::DexCache> dex_cache, uin
Thread* const self = Thread::Current();
StackHandleScope<1> hs(self);
Handle<mirror::Class> klass(hs.NewHandle(dex_cache->GetResolvedType(field_id.class_idx_)));
- if (klass.Get() == nullptr) {
+ if (klass == nullptr) {
return;
}
if (is_static) {
@@ -512,7 +512,7 @@ static void VMRuntime_preloadDexCaches(JNIEnv* env, jobject) {
CHECK(dex_file != nullptr);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::DexCache> dex_cache(hs.NewHandle(linker->RegisterDexFile(*dex_file, nullptr)));
- CHECK(dex_cache.Get() != nullptr); // Boot class path dex caches are never unloaded.
+ CHECK(dex_cache != nullptr); // Boot class path dex caches are never unloaded.
if (kPreloadDexCachesStrings) {
for (size_t j = 0; j < dex_cache->NumStrings(); j++) {
PreloadDexCachesResolveString(dex_cache, dex::StringIndex(j), strings);
diff --git a/runtime/native/java_lang_Class.cc b/runtime/native/java_lang_Class.cc
index 5438a6ddb4..256787b2a1 100644
--- a/runtime/native/java_lang_Class.cc
+++ b/runtime/native/java_lang_Class.cc
@@ -81,7 +81,7 @@ static jclass Class_classForName(JNIEnv* env, jclass, jstring javaName, jboolean
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Handle<mirror::Class> c(
hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor.c_str(), class_loader)));
- if (c.Get() == nullptr) {
+ if (c == nullptr) {
ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
env->ExceptionClear();
jthrowable cnfe = reinterpret_cast<jthrowable>(
@@ -137,7 +137,7 @@ static mirror::ObjectArray<mirror::Field>* GetDeclaredFields(
size_t array_idx = 0;
auto object_array = hs.NewHandle(mirror::ObjectArray<mirror::Field>::Alloc(
self, mirror::Field::ArrayClass(), array_size));
- if (object_array.Get() == nullptr) {
+ if (object_array == nullptr) {
return nullptr;
}
for (ArtField& field : ifields) {
@@ -267,7 +267,7 @@ static mirror::Field* GetPublicFieldRecursive(
Handle<mirror::String> h_name(hs.NewHandle(name));
// We search the current class, its direct interfaces then its superclass.
- while (h_clazz.Get() != nullptr) {
+ while (h_clazz != nullptr) {
mirror::Field* result = GetDeclaredField(self, h_clazz.Get(), h_name.Get());
if ((result != nullptr) && (result->GetAccessFlags() & kAccPublic)) {
return result;
@@ -319,14 +319,14 @@ static jobject Class_getDeclaredField(JNIEnv* env, jobject javaThis, jstring nam
ScopedFastNativeObjectAccess soa(env);
StackHandleScope<3> hs(soa.Self());
Handle<mirror::String> h_string = hs.NewHandle(soa.Decode<mirror::String>(name));
- if (h_string.Get() == nullptr) {
+ if (h_string == nullptr) {
ThrowNullPointerException("name == null");
return nullptr;
}
Handle<mirror::Class> h_klass = hs.NewHandle(DecodeClass(soa, javaThis));
Handle<mirror::Field> result =
hs.NewHandle(GetDeclaredField(soa.Self(), h_klass.Get(), h_string.Get()));
- if (result.Get() == nullptr) {
+ if (result == nullptr) {
std::string name_str = h_string->ToModifiedUtf8();
if (name_str == "value" && h_klass->IsStringClass()) {
// We log the error for this specific case, as the user might just swallow the exception.
@@ -377,7 +377,7 @@ static jobjectArray Class_getDeclaredConstructorsInternal(
}
auto h_constructors = hs.NewHandle(mirror::ObjectArray<mirror::Constructor>::Alloc(
soa.Self(), mirror::Constructor::ArrayClass(), constructor_count));
- if (UNLIKELY(h_constructors.Get() == nullptr)) {
+ if (UNLIKELY(h_constructors == nullptr)) {
soa.Self()->AssertPendingException();
return nullptr;
}
@@ -428,7 +428,7 @@ static jobjectArray Class_getDeclaredMethodsUnchecked(JNIEnv* env, jobject javaT
}
auto ret = hs.NewHandle(mirror::ObjectArray<mirror::Method>::Alloc(
soa.Self(), mirror::Method::ArrayClass(), num_methods));
- if (ret.Get() == nullptr) {
+ if (ret == nullptr) {
soa.Self()->AssertPendingOOMException();
return nullptr;
}
@@ -645,7 +645,7 @@ static jobject Class_newInstance(JNIEnv* env, jobject javaThis) {
// Verify that we can access the class.
if (!klass->IsPublic()) {
caller.Assign(GetCallingClass(soa.Self(), 1));
- if (caller.Get() != nullptr && !caller->CanAccess(klass.Get())) {
+ if (caller != nullptr && !caller->CanAccess(klass.Get())) {
soa.Self()->ThrowNewExceptionF(
"Ljava/lang/IllegalAccessException;", "%s is not accessible from %s",
klass->PrettyClass().c_str(), caller->PrettyClass().c_str());
@@ -673,17 +673,17 @@ static jobject Class_newInstance(JNIEnv* env, jobject javaThis) {
}
}
auto receiver = hs.NewHandle(klass->AllocObject(soa.Self()));
- if (UNLIKELY(receiver.Get() == nullptr)) {
+ if (UNLIKELY(receiver == nullptr)) {
soa.Self()->AssertPendingOOMException();
return nullptr;
}
// Verify that we can access the constructor.
auto* declaring_class = constructor->GetDeclaringClass();
if (!constructor->IsPublic()) {
- if (caller.Get() == nullptr) {
+ if (caller == nullptr) {
caller.Assign(GetCallingClass(soa.Self(), 1));
}
- if (UNLIKELY(caller.Get() != nullptr && !VerifyAccess(receiver.Get(),
+ if (UNLIKELY(caller != nullptr && !VerifyAccess(receiver.Get(),
declaring_class,
constructor->GetAccessFlags(),
caller.Get()))) {
diff --git a/runtime/native/java_lang_invoke_MethodHandleImpl.cc b/runtime/native/java_lang_invoke_MethodHandleImpl.cc
index 72a37f875d..9113841909 100644
--- a/runtime/native/java_lang_invoke_MethodHandleImpl.cc
+++ b/runtime/native/java_lang_invoke_MethodHandleImpl.cc
@@ -57,7 +57,7 @@ static jobject MethodHandleImpl_getMemberInternal(JNIEnv* env, jobject thiz) {
}
}
- if (UNLIKELY(h_object.Get() == nullptr)) {
+ if (UNLIKELY(h_object == nullptr)) {
soa.Self()->AssertPendingOOMException();
return nullptr;
}
diff --git a/runtime/native/java_lang_reflect_Executable.cc b/runtime/native/java_lang_reflect_Executable.cc
index ee59c4a9e2..2a3942829f 100644
--- a/runtime/native/java_lang_reflect_Executable.cc
+++ b/runtime/native/java_lang_reflect_Executable.cc
@@ -103,7 +103,7 @@ static jobjectArray Executable_getParameters0(JNIEnv* env, jobject javaMethod) {
}
// Validate the MethodParameters system annotation data.
- if (UNLIKELY(names.Get() == nullptr || access_flags.Get() == nullptr)) {
+ if (UNLIKELY(names == nullptr || access_flags == nullptr)) {
ThrowIllegalArgumentException(
StringPrintf("Missing parameter metadata for names or access flags for %s",
art_method->PrettyMethod().c_str()).c_str());
@@ -132,7 +132,7 @@ static jobjectArray Executable_getParameters0(JNIEnv* env, jobject javaMethod) {
mirror::ObjectArray<mirror::Object>::Alloc(self,
parameter_array_class.Get(),
names_count));
- if (UNLIKELY(parameter_array.Get() == nullptr)) {
+ if (UNLIKELY(parameter_array == nullptr)) {
self->AssertPendingException();
return nullptr;
}
@@ -154,7 +154,7 @@ static jobjectArray Executable_getParameters0(JNIEnv* env, jobject javaMethod) {
// Allocate / initialize the Parameter to add to parameter_array.
parameter.Assign(parameter_class->AllocObject(self));
- if (UNLIKELY(parameter.Get() == nullptr)) {
+ if (UNLIKELY(parameter == nullptr)) {
self->AssertPendingOOMException();
return nullptr;
}
diff --git a/runtime/native/libcore_util_CharsetUtils.cc b/runtime/native/libcore_util_CharsetUtils.cc
index 2590452678..e51b6d2a89 100644
--- a/runtime/native/libcore_util_CharsetUtils.cc
+++ b/runtime/native/libcore_util_CharsetUtils.cc
@@ -155,7 +155,7 @@ static jbyteArray charsToBytes(JNIEnv* env, jstring java_string, jint offset, ji
ScopedObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::String> string(hs.NewHandle(soa.Decode<mirror::String>(java_string)));
- if (string.Get() == nullptr) {
+ if (string == nullptr) {
return nullptr;
}
@@ -192,7 +192,7 @@ static jbyteArray CharsetUtils_toUtf8Bytes(JNIEnv* env, jclass, jstring java_str
ScopedObjectAccess soa(env);
StackHandleScope<1> hs(soa.Self());
Handle<mirror::String> string(hs.NewHandle(soa.Decode<mirror::String>(java_string)));
- if (string.Get() == nullptr) {
+ if (string == nullptr) {
return nullptr;
}
diff --git a/runtime/oat.h b/runtime/oat.h
index e7e8328796..0f6657b7ed 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,7 +32,7 @@ class InstructionSetFeatures;
class PACKED(4) OatHeader {
public:
static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
- static constexpr uint8_t kOatVersion[] = { '1', '1', '0', '\0' }; // Clean up code info change.
+ static constexpr uint8_t kOatVersion[] = { '1', '1', '2', '\0' }; // Manual bump (Revert^3 hash-based DexCache types; stack maps).
static constexpr const char* kImageLocationKey = "image-location";
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc
index 31eb1ccdc8..493da271d1 100644
--- a/runtime/oat_file.cc
+++ b/runtime/oat_file.cc
@@ -273,6 +273,36 @@ inline static bool ReadOatDexFileData(const OatFile& oat_file,
return true;
}
+static bool FindDexFileMapItem(const uint8_t* dex_begin,
+ const uint8_t* dex_end,
+ DexFile::MapItemType map_item_type,
+ const DexFile::MapItem** result_item) {
+ *result_item = nullptr;
+
+ const DexFile::Header* header =
+ BoundsCheckedCast<const DexFile::Header*>(dex_begin, dex_begin, dex_end);
+ if (nullptr == header) return false;
+
+ if (!DexFile::IsMagicValid(header->magic_)) return true; // Not a dex file, not an error.
+
+ const DexFile::MapList* map_list =
+ BoundsCheckedCast<const DexFile::MapList*>(dex_begin + header->map_off_, dex_begin, dex_end);
+ if (nullptr == map_list) return false;
+
+ const DexFile::MapItem* map_item = map_list->list_;
+ size_t count = map_list->size_;
+ while (count--) {
+ if (map_item->type_ == static_cast<uint16_t>(map_item_type)) {
+ *result_item = map_item;
+ break;
+ }
+ map_item = BoundsCheckedCast<const DexFile::MapItem*>(map_item + 1, dex_begin, dex_end);
+ if (nullptr == map_item) return false;
+ }
+
+ return true;
+}
+
bool OatFileBase::Setup(const char* abs_dex_location, std::string* error_msg) {
if (!GetOatHeader().IsValid()) {
std::string cause = GetOatHeader().GetValidationErrorMessage();
@@ -501,7 +531,19 @@ bool OatFileBase::Setup(const char* abs_dex_location, std::string* error_msg) {
uint8_t* current_dex_cache_arrays = nullptr;
if (dex_cache_arrays != nullptr) {
- DexCacheArraysLayout layout(pointer_size, *header);
+ // All DexCache types except for CallSite have their instance counts in the
+ // DexFile header. For CallSites, we need to read the info from the MapList.
+ const DexFile::MapItem* call_sites_item = nullptr;
+ if (!FindDexFileMapItem(DexBegin(),
+ DexEnd(),
+ DexFile::MapItemType::kDexTypeCallSiteIdItem,
+ &call_sites_item)) {
+ *error_msg = StringPrintf("In oat file '%s' could not read data from truncated DexFile map",
+ GetLocation().c_str());
+ return false;
+ }
+ size_t num_call_sites = call_sites_item == nullptr ? 0 : call_sites_item->size_;
+ DexCacheArraysLayout layout(pointer_size, *header, num_call_sites);
if (layout.Size() != 0u) {
if (static_cast<size_t>(dex_cache_arrays_end - dex_cache_arrays) < layout.Size()) {
*error_msg = StringPrintf("In oat file '%s' found OatDexFile #%zu for '%s' with "
@@ -1468,77 +1510,6 @@ std::string OatFile::EncodeDexFileDependencies(const std::vector<const DexFile*>
return out.str();
}
-bool OatFile::CheckStaticDexFileDependencies(const char* dex_dependencies, std::string* msg) {
- if (dex_dependencies == nullptr || dex_dependencies[0] == 0) {
- // No dependencies.
- return true;
- }
-
- // Assumption: this is not performance-critical. So it's OK to do this with a std::string and
- // Split() instead of manual parsing of the combined char*.
- std::vector<std::string> split;
- Split(dex_dependencies, kDexClassPathEncodingSeparator, &split);
- if (split.size() % 2 != 0) {
- // Expected pairs of location and checksum.
- *msg = StringPrintf("Odd number of elements in dependency list %s", dex_dependencies);
- return false;
- }
-
- for (auto it = split.begin(), end = split.end(); it != end; it += 2) {
- std::string& location = *it;
- std::string& checksum = *(it + 1);
- int64_t converted = strtoll(checksum.c_str(), nullptr, 10);
- if (converted == 0) {
- // Conversion error.
- *msg = StringPrintf("Conversion error for %s", checksum.c_str());
- return false;
- }
-
- uint32_t dex_checksum;
- std::string error_msg;
- if (DexFile::GetChecksum(DexFile::GetDexCanonicalLocation(location.c_str()).c_str(),
- &dex_checksum,
- &error_msg)) {
- if (converted != dex_checksum) {
- *msg = StringPrintf("Checksums don't match for %s: %" PRId64 " vs %u",
- location.c_str(), converted, dex_checksum);
- return false;
- }
- } else {
- // Problem retrieving checksum.
- // TODO: odex files?
- *msg = StringPrintf("Could not retrieve checksum for %s: %s", location.c_str(),
- error_msg.c_str());
- return false;
- }
- }
-
- return true;
-}
-
-bool OatFile::GetDexLocationsFromDependencies(const char* dex_dependencies,
- std::vector<std::string>* locations) {
- DCHECK(locations != nullptr);
- if (dex_dependencies == nullptr || dex_dependencies[0] == 0) {
- return true;
- }
-
- // Assumption: this is not performance-critical. So it's OK to do this with a std::string and
- // Split() instead of manual parsing of the combined char*.
- std::vector<std::string> split;
- Split(dex_dependencies, kDexClassPathEncodingSeparator, &split);
- if (split.size() % 2 != 0) {
- // Expected pairs of location and checksum.
- return false;
- }
-
- for (auto it = split.begin(), end = split.end(); it != end; it += 2) {
- locations->push_back(*it);
- }
-
- return true;
-}
-
OatFile::OatClass OatFile::FindOatClass(const DexFile& dex_file,
uint16_t class_def_idx,
bool* found) {
diff --git a/runtime/oat_file.h b/runtime/oat_file.h
index 111755e7a1..d24283afee 100644
--- a/runtime/oat_file.h
+++ b/runtime/oat_file.h
@@ -290,15 +290,6 @@ class OatFile {
// Create a dependency list (dex locations and checksums) for the given dex files.
static std::string EncodeDexFileDependencies(const std::vector<const DexFile*>& dex_files);
- // Check the given dependency list against their dex files - thus the name "Static," this does
- // not check the class-loader environment, only whether there have been file updates.
- static bool CheckStaticDexFileDependencies(const char* dex_dependencies, std::string* msg);
-
- // Get the dex locations of a dependency list. Note: this is *not* cleaned for synthetic
- // locations of multidex files.
- static bool GetDexLocationsFromDependencies(const char* dex_dependencies,
- std::vector<std::string>* locations);
-
// Finds the associated oat class for a dex_file and descriptor. Returns an invalid OatClass on
// error and sets found to false.
static OatClass FindOatClass(const DexFile& dex_file, uint16_t class_def_idx, bool* found);
diff --git a/runtime/oat_file_assistant.cc b/runtime/oat_file_assistant.cc
index 77cdd28d3a..5ae2fc51b7 100644
--- a/runtime/oat_file_assistant.cc
+++ b/runtime/oat_file_assistant.cc
@@ -38,6 +38,8 @@
namespace art {
+using android::base::StringPrintf;
+
std::ostream& operator << (std::ostream& stream, const OatFileAssistant::OatStatus status) {
switch (status) {
case OatFileAssistant::kOatCannotOpen:
@@ -264,7 +266,7 @@ std::vector<std::unique_ptr<const DexFile>> OatFileAssistant::LoadDexFiles(
const OatFile& oat_file, const char* dex_location) {
std::vector<std::unique_ptr<const DexFile>> dex_files;
- // Load the primary dex file.
+ // Load the main dex file.
std::string error_msg;
const OatFile::OatDexFile* oat_dex_file = oat_file.GetOatDexFile(
dex_location, nullptr, &error_msg);
@@ -280,12 +282,12 @@ std::vector<std::unique_ptr<const DexFile>> OatFileAssistant::LoadDexFiles(
}
dex_files.push_back(std::move(dex_file));
- // Load secondary multidex files
+ // Load the rest of the multidex entries
for (size_t i = 1; ; i++) {
- std::string secondary_dex_location = DexFile::GetMultiDexLocation(i, dex_location);
- oat_dex_file = oat_file.GetOatDexFile(secondary_dex_location.c_str(), nullptr);
+ std::string multidex_dex_location = DexFile::GetMultiDexLocation(i, dex_location);
+ oat_dex_file = oat_file.GetOatDexFile(multidex_dex_location.c_str(), nullptr);
if (oat_dex_file == nullptr) {
- // There are no more secondary dex files to load.
+ // There are no more multidex entries to load.
break;
}
@@ -300,10 +302,10 @@ std::vector<std::unique_ptr<const DexFile>> OatFileAssistant::LoadDexFiles(
}
bool OatFileAssistant::HasOriginalDexFiles() {
- // Ensure GetRequiredDexChecksum has been run so that
+ // Ensure GetRequiredDexChecksums has been run so that
// has_original_dex_files_ is initialized. We don't care about the result of
- // GetRequiredDexChecksum.
- GetRequiredDexChecksum();
+ // GetRequiredDexChecksums.
+ GetRequiredDexChecksums();
return has_original_dex_files_;
}
@@ -316,88 +318,66 @@ OatFileAssistant::OatStatus OatFileAssistant::OatFileStatus() {
}
bool OatFileAssistant::DexChecksumUpToDate(const VdexFile& file, std::string* error_msg) {
- if (file.GetHeader().GetNumberOfDexFiles() <= 0) {
- VLOG(oat) << "Vdex does not contain any dex files";
+ const std::vector<uint32_t>* required_dex_checksums = GetRequiredDexChecksums();
+ if (required_dex_checksums == nullptr) {
+ LOG(WARNING) << "Required dex checksums not found. Assuming dex checksums are up to date.";
+ return true;
+ }
+
+ uint32_t number_of_dex_files = file.GetHeader().GetNumberOfDexFiles();
+ if (required_dex_checksums->size() != number_of_dex_files) {
+ *error_msg = StringPrintf("expected %zu dex files but found %u",
+ required_dex_checksums->size(),
+ number_of_dex_files);
return false;
}
- // TODO: Use GetRequiredDexChecksum to get secondary checksums as well, not
- // just the primary. Because otherwise we may fail to see a secondary
- // checksum failure in the case when the original (multidex) files are
- // stripped but we have a newer odex file.
- const uint32_t* dex_checksum_pointer = GetRequiredDexChecksum();
- if (dex_checksum_pointer != nullptr) {
- uint32_t actual_checksum = file.GetLocationChecksum(0);
- if (*dex_checksum_pointer != actual_checksum) {
- VLOG(oat) << "Dex checksum does not match for primary dex: " << dex_location_
- << ". Expected: " << *dex_checksum_pointer
- << ", Actual: " << actual_checksum;
+ for (uint32_t i = 0; i < number_of_dex_files; i++) {
+ uint32_t expected_checksum = (*required_dex_checksums)[i];
+ uint32_t actual_checksum = file.GetLocationChecksum(i);
+ if (expected_checksum != actual_checksum) {
+ std::string dex = DexFile::GetMultiDexLocation(i, dex_location_.c_str());
+ *error_msg = StringPrintf("Dex checksum does not match for dex: %s."
+ "Expected: %u, actual: %u",
+ dex.c_str(),
+ expected_checksum,
+ actual_checksum);
return false;
}
}
- // Verify the dex checksums for any secondary multidex files
- for (uint32_t i = 1; i < file.GetHeader().GetNumberOfDexFiles(); i++) {
- std::string secondary_dex_location = DexFile::GetMultiDexLocation(i, dex_location_.c_str());
- uint32_t expected_secondary_checksum = 0;
- if (DexFile::GetChecksum(secondary_dex_location.c_str(),
- &expected_secondary_checksum,
- error_msg)) {
- uint32_t actual_secondary_checksum = file.GetLocationChecksum(i);
- if (expected_secondary_checksum != actual_secondary_checksum) {
- VLOG(oat) << "Dex checksum does not match for secondary dex: "
- << secondary_dex_location
- << ". Expected: " << expected_secondary_checksum
- << ", Actual: " << actual_secondary_checksum;
- return false;
- }
- } else {
- // If we can't get the checksum for the secondary location, we assume
- // the dex checksum is up to date for this and all other secondary dex
- // files.
- break;
- }
- }
return true;
}
bool OatFileAssistant::DexChecksumUpToDate(const OatFile& file, std::string* error_msg) {
- // Note: GetOatDexFile will return null if the dex checksum doesn't match
- // what we provide, which verifies the primary dex checksum for us.
- const uint32_t* dex_checksum_pointer = GetRequiredDexChecksum();
- const OatFile::OatDexFile* oat_dex_file = file.GetOatDexFile(
- dex_location_.c_str(), dex_checksum_pointer, error_msg);
- if (oat_dex_file == nullptr) {
+ const std::vector<uint32_t>* required_dex_checksums = GetRequiredDexChecksums();
+ if (required_dex_checksums == nullptr) {
+ LOG(WARNING) << "Required dex checksums not found. Assuming dex checksums are up to date.";
+ return true;
+ }
+
+ uint32_t number_of_dex_files = file.GetOatHeader().GetDexFileCount();
+ if (required_dex_checksums->size() != number_of_dex_files) {
+ *error_msg = StringPrintf("expected %zu dex files but found %u",
+ required_dex_checksums->size(),
+ number_of_dex_files);
return false;
}
- // Verify the dex checksums for any secondary multidex files
- for (size_t i = 1; ; i++) {
- std::string secondary_dex_location = DexFile::GetMultiDexLocation(i, dex_location_.c_str());
- const OatFile::OatDexFile* secondary_oat_dex_file
- = file.GetOatDexFile(secondary_dex_location.c_str(), nullptr);
- if (secondary_oat_dex_file == nullptr) {
- // There are no more secondary dex files to check.
- break;
+ for (uint32_t i = 0; i < number_of_dex_files; i++) {
+ std::string dex = DexFile::GetMultiDexLocation(i, dex_location_.c_str());
+ uint32_t expected_checksum = (*required_dex_checksums)[i];
+ const OatFile::OatDexFile* oat_dex_file = file.GetOatDexFile(dex.c_str(), nullptr);
+ if (oat_dex_file == nullptr) {
+ *error_msg = StringPrintf("failed to find %s in %s", dex.c_str(), file.GetLocation().c_str());
+ return false;
}
-
- uint32_t expected_secondary_checksum = 0;
- if (DexFile::GetChecksum(secondary_dex_location.c_str(),
- &expected_secondary_checksum, error_msg)) {
- uint32_t actual_secondary_checksum
- = secondary_oat_dex_file->GetDexFileLocationChecksum();
- if (expected_secondary_checksum != actual_secondary_checksum) {
- VLOG(oat) << "Dex checksum does not match for secondary dex: "
- << secondary_dex_location
- << ". Expected: " << expected_secondary_checksum
- << ", Actual: " << actual_secondary_checksum;
- return false;
- }
- } else {
- // If we can't get the checksum for the secondary location, we assume
- // the dex checksum is up to date for this and all other secondary dex
- // files.
- break;
+ uint32_t actual_checksum = oat_dex_file->GetDexFileLocationChecksum();
+ if (expected_checksum != actual_checksum) {
+ VLOG(oat) << "Dex checksum does not match for dex: " << dex
+ << ". Expected: " << expected_checksum
+ << ", Actual: " << actual_checksum;
+ return false;
}
}
return true;
@@ -710,13 +690,16 @@ std::string OatFileAssistant::ImageLocation() {
return image_spaces[0]->GetImageLocation();
}
-const uint32_t* OatFileAssistant::GetRequiredDexChecksum() {
- if (!required_dex_checksum_attempted_) {
- required_dex_checksum_attempted_ = true;
- required_dex_checksum_found_ = false;
+const std::vector<uint32_t>* OatFileAssistant::GetRequiredDexChecksums() {
+ if (!required_dex_checksums_attempted_) {
+ required_dex_checksums_attempted_ = true;
+ required_dex_checksums_found_ = false;
+ cached_required_dex_checksums_.clear();
std::string error_msg;
- if (DexFile::GetChecksum(dex_location_.c_str(), &cached_required_dex_checksum_, &error_msg)) {
- required_dex_checksum_found_ = true;
+ if (DexFile::GetMultiDexChecksums(dex_location_.c_str(),
+ &cached_required_dex_checksums_,
+ &error_msg)) {
+ required_dex_checksums_found_ = true;
has_original_dex_files_ = true;
} else {
// This can happen if the original dex file has been stripped from the
@@ -724,19 +707,23 @@ const uint32_t* OatFileAssistant::GetRequiredDexChecksum() {
VLOG(oat) << "OatFileAssistant: " << error_msg;
has_original_dex_files_ = false;
- // Get the checksum from the odex if we can.
+ // Get the checksums from the odex if we can.
const OatFile* odex_file = odex_.GetFile();
if (odex_file != nullptr) {
- const OatFile::OatDexFile* odex_dex_file
- = odex_file->GetOatDexFile(dex_location_.c_str(), nullptr);
- if (odex_dex_file != nullptr) {
- cached_required_dex_checksum_ = odex_dex_file->GetDexFileLocationChecksum();
- required_dex_checksum_found_ = true;
+ required_dex_checksums_found_ = true;
+ for (size_t i = 0; i < odex_file->GetOatHeader().GetDexFileCount(); i++) {
+ std::string dex = DexFile::GetMultiDexLocation(i, dex_location_.c_str());
+ const OatFile::OatDexFile* odex_dex_file = odex_file->GetOatDexFile(dex.c_str(), nullptr);
+ if (odex_dex_file == nullptr) {
+ required_dex_checksums_found_ = false;
+ break;
+ }
+ cached_required_dex_checksums_.push_back(odex_dex_file->GetDexFileLocationChecksum());
}
}
}
}
- return required_dex_checksum_found_ ? &cached_required_dex_checksum_ : nullptr;
+ return required_dex_checksums_found_ ? &cached_required_dex_checksums_ : nullptr;
}
const OatFileAssistant::ImageInfo* OatFileAssistant::GetImageInfo() {
diff --git a/runtime/oat_file_assistant.h b/runtime/oat_file_assistant.h
index 6d47ad2228..3ede29f5e0 100644
--- a/runtime/oat_file_assistant.h
+++ b/runtime/oat_file_assistant.h
@@ -400,13 +400,13 @@ class OatFileAssistant {
// the oat file assistant.
static std::string ImageLocation();
- // Gets the dex checksum required for an up-to-date oat file.
- // Returns dex_checksum if a required checksum was located. Returns
- // null if the required checksum was not found.
- // The caller shouldn't clean up or free the returned pointer.
- // This sets the has_original_dex_files_ field to true if a checksum was
- // found for the dex_location_ dex file.
- const uint32_t* GetRequiredDexChecksum();
+ // Gets the dex checksums required for an up-to-date oat file.
+ // Returns cached_required_dex_checksums if the required checksums were
+ // located. Returns null if the required checksums were not found. The
+ // caller shouldn't clean up or free the returned pointer. This sets the
+ // has_original_dex_files_ field to true if the checksums were found for the
+ // dex_location_ dex file.
+ const std::vector<uint32_t>* GetRequiredDexChecksums();
// Returns the loaded image info.
// Loads the image info if needed. Returns null if the image info failed
@@ -430,11 +430,11 @@ class OatFileAssistant {
// Whether we will attempt to load oat files executable.
bool load_executable_ = false;
- // Cached value of the required dex checksum.
- // This should be accessed only by the GetRequiredDexChecksum() method.
- uint32_t cached_required_dex_checksum_;
- bool required_dex_checksum_attempted_ = false;
- bool required_dex_checksum_found_;
+ // Cached value of the required dex checksums.
+ // This should be accessed only by the GetRequiredDexChecksums() method.
+ std::vector<uint32_t> cached_required_dex_checksums_;
+ bool required_dex_checksums_attempted_ = false;
+ bool required_dex_checksums_found_;
bool has_original_dex_files_;
OatFileInfo odex_;
diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc
index f777340cfd..9b35489330 100644
--- a/runtime/oat_file_assistant_test.cc
+++ b/runtime/oat_file_assistant_test.cc
@@ -237,16 +237,16 @@ TEST_F(OatFileAssistantTest, MultiDexOatUpToDate) {
EXPECT_EQ(2u, dex_files.size());
}
-// Case: We have a MultiDEX file where the secondary dex file is out of date.
+// Case: We have a MultiDEX file where the non-main multidex entry is out of date.
// Expect: The status is kDex2OatNeeded.
-TEST_F(OatFileAssistantTest, MultiDexSecondaryOutOfDate) {
- std::string dex_location = GetScratchDir() + "/MultiDexSecondaryOutOfDate.jar";
+TEST_F(OatFileAssistantTest, MultiDexNonMainOutOfDate) {
+ std::string dex_location = GetScratchDir() + "/MultiDexNonMainOutOfDate.jar";
// Compile code for GetMultiDexSrc1.
Copy(GetMultiDexSrc1(), dex_location);
GenerateOatForTest(dex_location.c_str(), CompilerFilter::kSpeed);
- // Now overwrite the dex file with GetMultiDexSrc2 so the secondary checksum
+ // Now overwrite the dex file with GetMultiDexSrc2 so the non-main checksum
// is out of date.
Copy(GetMultiDexSrc2(), dex_location);
@@ -256,6 +256,37 @@ TEST_F(OatFileAssistantTest, MultiDexSecondaryOutOfDate) {
EXPECT_TRUE(oat_file_assistant.HasOriginalDexFiles());
}
+// Case: We have a stripped MultiDEX file where the non-main multidex entry is
+// out of date with respect to the odex file.
+TEST_F(OatFileAssistantTest, StrippedMultiDexNonMainOutOfDate) {
+ std::string dex_location = GetScratchDir() + "/StrippedMultiDexNonMainOutOfDate.jar";
+ std::string odex_location = GetOdexDir() + "/StrippedMultiDexNonMainOutOfDate.odex";
+
+ // Compile the oat from GetMultiDexSrc1.
+ Copy(GetMultiDexSrc1(), dex_location);
+ GenerateOatForTest(dex_location.c_str(), CompilerFilter::kSpeed);
+
+ // Compile the odex from GetMultiDexSrc2, which has a different non-main
+ // dex checksum.
+ Copy(GetMultiDexSrc2(), dex_location);
+ GenerateOdexForTest(dex_location, odex_location, CompilerFilter::kInterpretOnly);
+
+ // Strip the dex file.
+ Copy(GetStrippedDexSrc1(), dex_location);
+
+ OatFileAssistant oat_file_assistant(dex_location.c_str(), kRuntimeISA, /*load_executable*/false);
+
+ // Because the dex file is stripped, the odex file is considered the source
+ // of truth for the dex checksums. The oat file should be considered
+ // unusable.
+ std::unique_ptr<OatFile> best_file = oat_file_assistant.GetBestOatFile();
+ ASSERT_TRUE(best_file.get() != nullptr);
+ EXPECT_EQ(best_file->GetLocation(), odex_location);
+ EXPECT_FALSE(oat_file_assistant.HasOriginalDexFiles());
+ EXPECT_EQ(OatFileAssistant::kOatUpToDate, oat_file_assistant.OdexFileStatus());
+ EXPECT_EQ(OatFileAssistant::kOatDexOutOfDate, oat_file_assistant.OatFileStatus());
+}
+
// Case: We have a MultiDEX file and up-to-date OAT file for it with relative
// encoded dex locations.
// Expect: The oat file status is kNoDexOptNeeded.
@@ -336,16 +367,16 @@ TEST_F(OatFileAssistantTest, VdexDexOutOfDate) {
oat_file_assistant.GetDexOptNeeded(CompilerFilter::kSpeed));
}
-// Case: We have a MultiDEX (ODEX) VDEX file where the secondary dex file is
-// out of date and there is no corresponding ODEX file.
-TEST_F(OatFileAssistantTest, VdexMultiDexSecondaryOutOfDate) {
+// Case: We have a MultiDEX (ODEX) VDEX file where the non-main multidex entry
+// is out of date and there is no corresponding ODEX file.
+TEST_F(OatFileAssistantTest, VdexMultiDexNonMainOutOfDate) {
// This test case is only meaningful if vdex is enabled.
if (!kIsVdexEnabled) {
return;
}
- std::string dex_location = GetScratchDir() + "/VdexMultiDexSecondaryOutOfDate.jar";
- std::string oat_location = GetOdexDir() + "/VdexMultiDexSecondaryOutOfDate.oat";
+ std::string dex_location = GetScratchDir() + "/VdexMultiDexNonMainOutOfDate.jar";
+ std::string oat_location = GetOdexDir() + "/VdexMultiDexNonMainOutOfDate.oat";
Copy(GetMultiDexSrc1(), dex_location);
GenerateOdexForTest(dex_location, oat_location, CompilerFilter::kSpeed);
@@ -1028,7 +1059,7 @@ TEST_F(OatFileAssistantTest, DexOptStatusValues) {
ClassLinker* linker = Runtime::Current()->GetClassLinker();
Handle<mirror::Class> dexfile(
hs.NewHandle(linker->FindSystemClass(soa.Self(), "Ldalvik/system/DexFile;")));
- ASSERT_FALSE(dexfile.Get() == nullptr);
+ ASSERT_FALSE(dexfile == nullptr);
linker->EnsureInitialized(soa.Self(), dexfile, true, true);
for (std::pair<OatFileAssistant::DexOptNeeded, const char*> field : mapping) {
diff --git a/runtime/oat_file_manager.cc b/runtime/oat_file_manager.cc
index a46b47075c..70796148a4 100644
--- a/runtime/oat_file_manager.cc
+++ b/runtime/oat_file_manager.cc
@@ -342,7 +342,7 @@ static void GetDexFilesFromDexElementsArray(
ScopedObjectAccessAlreadyRunnable& soa,
Handle<mirror::ObjectArray<mirror::Object>> dex_elements,
std::priority_queue<DexFileAndClassPair>* queue) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (dex_elements.Get() == nullptr) {
+ if (dex_elements == nullptr) {
// Nothing to do.
return;
}
@@ -463,14 +463,14 @@ bool OatFileManager::HasCollisions(const OatFile* oat_file,
hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader));
Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements =
hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Object>>(dex_elements));
- if (h_class_loader.Get() != nullptr &&
+ if (h_class_loader != nullptr &&
GetDexFilesFromClassLoader(soa, h_class_loader.Get(), &queue)) {
class_loader_ok = true;
// In this case, also take into account the dex_elements array, if given. We don't need to
// read it otherwise, as we'll compare against all open oat files anyways.
GetDexFilesFromDexElementsArray(soa, h_dex_elements, &queue);
- } else if (h_class_loader.Get() != nullptr) {
+ } else if (h_class_loader != nullptr) {
VLOG(class_linker) << "Something unsupported with "
<< mirror::Class::PrettyClass(h_class_loader->GetClass());
}
@@ -658,7 +658,7 @@ std::vector<std::unique_ptr<const DexFile>> OatFileManager::OpenDexFilesFromOat(
Handle<mirror::ClassLoader> h_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader)));
// Can not load app image without class loader.
- if (h_loader.Get() != nullptr) {
+ if (h_loader != nullptr) {
std::string temp_error_msg;
// Add image space has a race condition since other threads could be reading from the
// spaces array.
diff --git a/runtime/oat_file_test.cc b/runtime/oat_file_test.cc
index b416b9dbad..d5fe1f382a 100644
--- a/runtime/oat_file_test.cc
+++ b/runtime/oat_file_test.cc
@@ -62,54 +62,4 @@ TEST_F(OatFileTest, ResolveRelativeEncodedDexLocation) {
"/data/app/foo/base.apk", "o/base.apk"));
}
-static std::vector<const DexFile*> ToConstDexFiles(
- const std::vector<std::unique_ptr<const DexFile>>& in) {
- std::vector<const DexFile*> ret;
- for (auto& d : in) {
- ret.push_back(d.get());
- }
- return ret;
-}
-
-TEST_F(OatFileTest, DexFileDependencies) {
- std::string error_msg;
-
- // No dependencies.
- EXPECT_TRUE(OatFile::CheckStaticDexFileDependencies(nullptr, &error_msg)) << error_msg;
- EXPECT_TRUE(OatFile::CheckStaticDexFileDependencies("", &error_msg)) << error_msg;
-
- // Ill-formed dependencies.
- EXPECT_FALSE(OatFile::CheckStaticDexFileDependencies("abc", &error_msg));
- EXPECT_FALSE(OatFile::CheckStaticDexFileDependencies("abc*123*def", &error_msg));
- EXPECT_FALSE(OatFile::CheckStaticDexFileDependencies("abc*def*", &error_msg));
-
- // Unsatisfiable dependency.
- EXPECT_FALSE(OatFile::CheckStaticDexFileDependencies("abc*123*", &error_msg));
-
- // Load some dex files to be able to do a real test.
- ScopedObjectAccess soa(Thread::Current());
-
- std::vector<std::unique_ptr<const DexFile>> dex_files1 = OpenTestDexFiles("Main");
- std::vector<const DexFile*> dex_files_const1 = ToConstDexFiles(dex_files1);
- std::string encoding1 = OatFile::EncodeDexFileDependencies(dex_files_const1);
- EXPECT_TRUE(OatFile::CheckStaticDexFileDependencies(encoding1.c_str(), &error_msg))
- << error_msg << " " << encoding1;
- std::vector<std::string> split1;
- EXPECT_TRUE(OatFile::GetDexLocationsFromDependencies(encoding1.c_str(), &split1));
- ASSERT_EQ(split1.size(), 1U);
- EXPECT_EQ(split1[0], dex_files_const1[0]->GetLocation());
-
- std::vector<std::unique_ptr<const DexFile>> dex_files2 = OpenTestDexFiles("MultiDex");
- EXPECT_GT(dex_files2.size(), 1U);
- std::vector<const DexFile*> dex_files_const2 = ToConstDexFiles(dex_files2);
- std::string encoding2 = OatFile::EncodeDexFileDependencies(dex_files_const2);
- EXPECT_TRUE(OatFile::CheckStaticDexFileDependencies(encoding2.c_str(), &error_msg))
- << error_msg << " " << encoding2;
- std::vector<std::string> split2;
- EXPECT_TRUE(OatFile::GetDexLocationsFromDependencies(encoding2.c_str(), &split2));
- ASSERT_EQ(split2.size(), 2U);
- EXPECT_EQ(split2[0], dex_files_const2[0]->GetLocation());
- EXPECT_EQ(split2[1], dex_files_const2[1]->GetLocation());
-}
-
} // namespace art
diff --git a/runtime/object_lock.cc b/runtime/object_lock.cc
index 39ab52fb2d..f6db544276 100644
--- a/runtime/object_lock.cc
+++ b/runtime/object_lock.cc
@@ -24,7 +24,7 @@ namespace art {
template <typename T>
ObjectLock<T>::ObjectLock(Thread* self, Handle<T> object) : self_(self), obj_(object) {
- CHECK(object.Get() != nullptr);
+ CHECK(object != nullptr);
obj_->MonitorEnter(self_);
}
@@ -50,7 +50,7 @@ void ObjectLock<T>::NotifyAll() {
template <typename T>
ObjectTryLock<T>::ObjectTryLock(Thread* self, Handle<T> object) : self_(self), obj_(object) {
- CHECK(object.Get() != nullptr);
+ CHECK(object != nullptr);
acquired_ = obj_->MonitorTryEnter(self_) != nullptr;
}
diff --git a/runtime/openjdkjvmti/ti_class.cc b/runtime/openjdkjvmti/ti_class.cc
index c14fd84264..7ca233fb10 100644
--- a/runtime/openjdkjvmti/ti_class.cc
+++ b/runtime/openjdkjvmti/ti_class.cc
@@ -42,12 +42,17 @@
#include "class_linker.h"
#include "common_throws.h"
#include "events-inl.h"
+#include "gc/heap.h"
+#include "gc_root.h"
#include "handle.h"
#include "jni_env_ext-inl.h"
#include "jni_internal.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
+#include "mirror/object_reference.h"
+#include "mirror/object-inl.h"
+#include "mirror/reference.h"
#include "runtime.h"
#include "runtime_callbacks.h"
#include "ScopedLocalRef.h"
@@ -261,15 +266,22 @@ struct ClassCallback : public art::ClassLoadCallback {
thread_jni.get(),
jklass.get());
}
- AddTempClass(thread, jklass.get());
+ if (klass->IsTemp()) {
+ AddTempClass(thread, jklass.get());
+ }
}
}
- void ClassPrepare(art::Handle<art::mirror::Class> temp_klass ATTRIBUTE_UNUSED,
+ void ClassPrepare(art::Handle<art::mirror::Class> temp_klass,
art::Handle<art::mirror::Class> klass)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
if (event_handler->IsEventEnabledAnywhere(ArtJvmtiEvent::kClassPrepare)) {
art::Thread* thread = art::Thread::Current();
+ if (temp_klass.Get() != klass.Get()) {
+ DCHECK(temp_klass->IsTemp());
+ DCHECK(temp_klass->IsRetired());
+ HandleTempClass(thread, temp_klass, klass);
+ }
ScopedLocalRef<jclass> jklass(thread->GetJniEnv(),
thread->GetJniEnv()->AddLocalReference<jclass>(klass.Get()));
ScopedLocalRef<jthread> thread_jni(
@@ -283,32 +295,209 @@ struct ClassCallback : public art::ClassLoadCallback {
}
}
+ // To support parallel class-loading, we need to perform some locking dances here. Namely,
+ // the fixup stage must not be holding the temp_classes lock when it fixes up the system
+ // (as that requires suspending all mutators).
+
void AddTempClass(art::Thread* self, jclass klass) {
std::unique_lock<std::mutex> mu(temp_classes_lock);
- temp_classes.push_back(reinterpret_cast<jclass>(self->GetJniEnv()->NewGlobalRef(klass)));
+ jclass global_klass = reinterpret_cast<jclass>(self->GetJniEnv()->NewGlobalRef(klass));
+ temp_classes.push_back(global_klass);
}
- void HandleTempClass(art::Handle<art::mirror::Class> temp_klass,
+ void HandleTempClass(art::Thread* self,
+ art::Handle<art::mirror::Class> temp_klass,
art::Handle<art::mirror::Class> klass)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
- std::unique_lock<std::mutex> mu(temp_classes_lock);
- if (temp_classes.empty()) {
- return;
- }
+ bool requires_fixup = false;
+ {
+ std::unique_lock<std::mutex> mu(temp_classes_lock);
+ if (temp_classes.empty()) {
+ return;
+ }
- art::Thread* self = art::Thread::Current();
- for (auto it = temp_classes.begin(); it != temp_classes.end(); ++it) {
- if (temp_klass.Get() == art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(*it))) {
- temp_classes.erase(it);
- FixupTempClass(temp_klass, klass);
+ for (auto it = temp_classes.begin(); it != temp_classes.end(); ++it) {
+ if (temp_klass.Get() == art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(*it))) {
+ self->GetJniEnv()->DeleteGlobalRef(*it);
+ temp_classes.erase(it);
+ requires_fixup = true;
+ break;
+ }
}
}
+ if (requires_fixup) {
+ FixupTempClass(self, temp_klass, klass);
+ }
}
- void FixupTempClass(art::Handle<art::mirror::Class> temp_klass ATTRIBUTE_UNUSED,
- art::Handle<art::mirror::Class> klass ATTRIBUTE_UNUSED)
+ void FixupTempClass(art::Thread* self,
+ art::Handle<art::mirror::Class> temp_klass,
+ art::Handle<art::mirror::Class> klass)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
- // TODO: Implement.
+ // Suspend everything.
+ art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
+ if (heap->IsGcConcurrentAndMoving()) {
+      // Need to visit objects while a moving GC isn't running. See the
+      // comment in Heap::VisitObjects().
+ heap->IncrementDisableMovingGC(self);
+ }
+ {
+ art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
+ art::ScopedSuspendAll ssa("FixupTempClass");
+
+ art::mirror::Class* input = temp_klass.Get();
+ art::mirror::Class* output = klass.Get();
+
+ FixupGlobalReferenceTables(input, output);
+ FixupLocalReferenceTables(self, input, output);
+ FixupHeap(input, output);
+ }
+ if (heap->IsGcConcurrentAndMoving()) {
+ heap->DecrementDisableMovingGC(self);
+ }
+ }
+
+ class RootUpdater : public art::RootVisitor {
+ public:
+ RootUpdater(const art::mirror::Class* input, art::mirror::Class* output)
+ : input_(input), output_(output) {}
+
+ void VisitRoots(art::mirror::Object*** roots,
+ size_t count,
+ const art::RootInfo& info ATTRIBUTE_UNUSED)
+ OVERRIDE {
+ for (size_t i = 0; i != count; ++i) {
+ if (*roots[i] == input_) {
+ *roots[i] = output_;
+ }
+ }
+ }
+
+ void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
+ size_t count,
+ const art::RootInfo& info ATTRIBUTE_UNUSED)
+ OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ for (size_t i = 0; i != count; ++i) {
+ if (roots[i]->AsMirrorPtr() == input_) {
+ roots[i]->Assign(output_);
+ }
+ }
+ }
+
+ private:
+ const art::mirror::Class* input_;
+ art::mirror::Class* output_;
+ };
+
+ void FixupGlobalReferenceTables(art::mirror::Class* input, art::mirror::Class* output)
+ REQUIRES(art::Locks::mutator_lock_) {
+ art::JavaVMExt* java_vm = art::Runtime::Current()->GetJavaVM();
+
+ // Fix up the global table with a root visitor.
+ RootUpdater global_update(input, output);
+ java_vm->VisitRoots(&global_update);
+
+ class WeakGlobalUpdate : public art::IsMarkedVisitor {
+ public:
+ WeakGlobalUpdate(art::mirror::Class* root_input, art::mirror::Class* root_output)
+ : input_(root_input), output_(root_output) {}
+
+ art::mirror::Object* IsMarked(art::mirror::Object* obj) OVERRIDE {
+ if (obj == input_) {
+ return output_;
+ }
+ return obj;
+ }
+
+ private:
+ const art::mirror::Class* input_;
+ art::mirror::Class* output_;
+ };
+ WeakGlobalUpdate weak_global_update(input, output);
+ java_vm->SweepJniWeakGlobals(&weak_global_update);
+ }
+
+ void FixupLocalReferenceTables(art::Thread* self,
+ art::mirror::Class* input,
+ art::mirror::Class* output)
+ REQUIRES(art::Locks::mutator_lock_) {
+ class LocalUpdate {
+ public:
+ LocalUpdate(const art::mirror::Class* root_input, art::mirror::Class* root_output)
+ : input_(root_input), output_(root_output) {}
+
+ static void Callback(art::Thread* t, void* arg) REQUIRES(art::Locks::mutator_lock_) {
+ LocalUpdate* local = reinterpret_cast<LocalUpdate*>(arg);
+
+ // Fix up the local table with a root visitor.
+ RootUpdater local_update(local->input_, local->output_);
+ t->GetJniEnv()->locals.VisitRoots(
+ &local_update, art::RootInfo(art::kRootJNILocal, t->GetThreadId()));
+ }
+
+ private:
+ const art::mirror::Class* input_;
+ art::mirror::Class* output_;
+ };
+ LocalUpdate local_upd(input, output);
+ art::MutexLock mu(self, *art::Locks::thread_list_lock_);
+ art::Runtime::Current()->GetThreadList()->ForEach(LocalUpdate::Callback, &local_upd);
+ }
+
+ void FixupHeap(art::mirror::Class* input, art::mirror::Class* output)
+ REQUIRES(art::Locks::mutator_lock_) {
+ class HeapFixupVisitor {
+ public:
+ HeapFixupVisitor(const art::mirror::Class* root_input, art::mirror::Class* root_output)
+ : input_(root_input), output_(root_output) {}
+
+ void operator()(art::mirror::Object* src,
+ art::MemberOffset field_offset,
+ bool is_static ATTRIBUTE_UNUSED) const
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ art::mirror::HeapReference<art::mirror::Object>* trg =
+ src->GetFieldObjectReferenceAddr(field_offset);
+ if (trg->AsMirrorPtr() == input_) {
+ DCHECK_NE(field_offset.Uint32Value(), 0u); // This shouldn't be the class field of
+ // an object.
+ trg->Assign(output_);
+ }
+ }
+
+ void operator()(art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
+ art::ObjPtr<art::mirror::Reference> reference) const
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ art::mirror::Object* val = reference->GetReferent();
+ if (val == input_) {
+ reference->SetReferent<false>(output_);
+ }
+ }
+
+ void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
+ const {
+ LOG(FATAL) << "Unreachable";
+ }
+
+ void VisitRootIfNonNull(
+ art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
+ LOG(FATAL) << "Unreachable";
+ }
+
+ static void AllObjectsCallback(art::mirror::Object* obj, void* arg)
+ REQUIRES_SHARED(art::Locks::mutator_lock_) {
+ HeapFixupVisitor* hfv = reinterpret_cast<HeapFixupVisitor*>(arg);
+
+ // Visit references, not native roots.
+ obj->VisitReferences<false>(*hfv, *hfv);
+ }
+
+ private:
+ const art::mirror::Class* input_;
+ art::mirror::Class* output_;
+ };
+ HeapFixupVisitor hfv(input, output);
+ art::Runtime::Current()->GetHeap()->VisitObjectsPaused(HeapFixupVisitor::AllObjectsCallback,
+ &hfv);
}
// A set of all the temp classes we have handed out. We have to fix up references to these.
diff --git a/runtime/openjdkjvmti/ti_class_loader.cc b/runtime/openjdkjvmti/ti_class_loader.cc
index afec0bfac0..d05f579407 100644
--- a/runtime/openjdkjvmti/ti_class_loader.cc
+++ b/runtime/openjdkjvmti/ti_class_loader.cc
@@ -119,11 +119,11 @@ art::ObjPtr<art::mirror::LongArray> ClassLoaderHelper::AllocateNewDexFileCookie(
art::Handle<art::mirror::LongArray> cookie,
const art::DexFile* dex_file) {
art::StackHandleScope<1> hs(self);
- CHECK(cookie.Get() != nullptr);
+ CHECK(cookie != nullptr);
CHECK_GE(cookie->GetLength(), 1);
art::Handle<art::mirror::LongArray> new_cookie(
hs.NewHandle(art::mirror::LongArray::Alloc(self, cookie->GetLength() + 1)));
- if (new_cookie.Get() == nullptr) {
+ if (new_cookie == nullptr) {
self->AssertPendingOOMException();
return nullptr;
}
@@ -183,13 +183,13 @@ art::ObjPtr<art::mirror::Object> ClassLoaderHelper::FindSourceDexFileObject(
// Start navigating the fields of the loader (now known to be a BaseDexClassLoader derivative)
art::Handle<art::mirror::Object> path_list(
hs.NewHandle(path_list_field->GetObject(loader.Get())));
- CHECK(path_list.Get() != nullptr);
+ CHECK(path_list != nullptr);
CHECK(!self->IsExceptionPending());
art::Handle<art::mirror::ObjectArray<art::mirror::Object>> dex_elements_list(hs.NewHandle(
dex_path_list_element_field->GetObject(path_list.Get())->
AsObjectArray<art::mirror::Object>()));
CHECK(!self->IsExceptionPending());
- CHECK(dex_elements_list.Get() != nullptr);
+ CHECK(dex_elements_list != nullptr);
size_t num_elements = dex_elements_list->GetLength();
// Iterate over the DexPathList$Element to find the right one
for (size_t i = 0; i < num_elements; i++) {
diff --git a/runtime/openjdkjvmti/ti_redefine.cc b/runtime/openjdkjvmti/ti_redefine.cc
index f0c0dbcbfc..f01acc11aa 100644
--- a/runtime/openjdkjvmti/ti_redefine.cc
+++ b/runtime/openjdkjvmti/ti_redefine.cc
@@ -38,12 +38,17 @@
#include "art_jvmti.h"
#include "base/array_slice.h"
#include "base/logging.h"
+#include "debugger.h"
#include "dex_file.h"
#include "dex_file_types.h"
#include "events-inl.h"
#include "gc/allocation_listener.h"
#include "gc/heap.h"
#include "instrumentation.h"
+#include "jdwp/jdwp.h"
+#include "jdwp/jdwp_constants.h"
+#include "jdwp/jdwp_event.h"
+#include "jdwp/object_registry.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni_env_ext-inl.h"
@@ -63,6 +68,66 @@ namespace openjdkjvmti {
using android::base::StringPrintf;
+// A helper that fills in a class's obsolete_methods_ and obsolete_dex_caches_ ClassExt fields as
+// they are created. This ensures that we can always call any method of an obsolete ArtMethod object
+// almost as soon as it is created, since the GetObsoleteDexCache method will succeed.
+class ObsoleteMap {
+ public:
+ art::ArtMethod* FindObsoleteVersion(art::ArtMethod* original)
+ REQUIRES(art::Locks::mutator_lock_, art::Roles::uninterruptible_) {
+ auto method_pair = id_map_.find(original);
+ if (method_pair != id_map_.end()) {
+ art::ArtMethod* res = obsolete_methods_->GetElementPtrSize<art::ArtMethod*>(
+ method_pair->second, art::kRuntimePointerSize);
+ DCHECK(res != nullptr);
+ DCHECK_EQ(original, res->GetNonObsoleteMethod());
+ return res;
+ } else {
+ return nullptr;
+ }
+ }
+
+ void RecordObsolete(art::ArtMethod* original, art::ArtMethod* obsolete)
+ REQUIRES(art::Locks::mutator_lock_, art::Roles::uninterruptible_) {
+ DCHECK(original != nullptr);
+ DCHECK(obsolete != nullptr);
+ int32_t slot = next_free_slot_++;
+ DCHECK_LT(slot, obsolete_methods_->GetLength());
+ DCHECK(nullptr ==
+ obsolete_methods_->GetElementPtrSize<art::ArtMethod*>(slot, art::kRuntimePointerSize));
+ DCHECK(nullptr == obsolete_dex_caches_->Get(slot));
+ obsolete_methods_->SetElementPtrSize(slot, obsolete, art::kRuntimePointerSize);
+ obsolete_dex_caches_->Set(slot, original_dex_cache_);
+ id_map_.insert({original, slot});
+ }
+
+ ObsoleteMap(art::ObjPtr<art::mirror::PointerArray> obsolete_methods,
+ art::ObjPtr<art::mirror::ObjectArray<art::mirror::DexCache>> obsolete_dex_caches,
+ art::ObjPtr<art::mirror::DexCache> original_dex_cache)
+ : next_free_slot_(0),
+ obsolete_methods_(obsolete_methods),
+ obsolete_dex_caches_(obsolete_dex_caches),
+ original_dex_cache_(original_dex_cache) {
+ // Figure out where the first unused slot in the obsolete_methods_ array is.
+ while (obsolete_methods_->GetElementPtrSize<art::ArtMethod*>(
+ next_free_slot_, art::kRuntimePointerSize) != nullptr) {
+ DCHECK(obsolete_dex_caches_->Get(next_free_slot_) != nullptr);
+ next_free_slot_++;
+ }
+ // Sanity check that the same slot in obsolete_dex_caches_ is free.
+ DCHECK(obsolete_dex_caches_->Get(next_free_slot_) == nullptr);
+ }
+
+ private:
+ int32_t next_free_slot_;
+ std::unordered_map<art::ArtMethod*, int32_t> id_map_;
+ // Pointers to the fields in mirror::ClassExt. These can be held as ObjPtr since this is only used
+ // when we have an exclusive mutator_lock_ (i.e. all threads are suspended).
+ art::ObjPtr<art::mirror::PointerArray> obsolete_methods_;
+ art::ObjPtr<art::mirror::ObjectArray<art::mirror::DexCache>> obsolete_dex_caches_;
+ art::ObjPtr<art::mirror::DexCache> original_dex_cache_;
+};
+
// This visitor walks thread stacks and allocates and sets up the obsolete methods. It also does
// some basic sanity checks that the obsolete method is sane.
class ObsoleteMethodStackVisitor : public art::StackVisitor {
@@ -71,7 +136,7 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
art::Thread* thread,
art::LinearAlloc* allocator,
const std::unordered_set<art::ArtMethod*>& obsoleted_methods,
- /*out*/std::unordered_map<art::ArtMethod*, art::ArtMethod*>* obsolete_maps)
+ ObsoleteMap* obsolete_maps)
: StackVisitor(thread,
/*context*/nullptr,
StackVisitor::StackWalkKind::kIncludeInlinedFrames),
@@ -89,7 +154,7 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
art::Thread* thread,
art::LinearAlloc* allocator,
const std::unordered_set<art::ArtMethod*>& obsoleted_methods,
- /*out*/std::unordered_map<art::ArtMethod*, art::ArtMethod*>* obsolete_maps)
+ ObsoleteMap* obsolete_maps)
REQUIRES(art::Locks::mutator_lock_) {
ObsoleteMethodStackVisitor visitor(thread,
allocator,
@@ -99,6 +164,7 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
}
bool VisitFrame() OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
+ art::ScopedAssertNoThreadSuspension snts("Fixing up the stack for obsolete methods.");
art::ArtMethod* old_method = GetMethod();
if (obsoleted_methods_.find(old_method) != obsoleted_methods_.end()) {
// We cannot ensure that the right dex file is used in inlined frames so we don't support
@@ -108,9 +174,8 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
// TODO We should really support redefining intrinsics.
// We don't support intrinsics so check for them here.
DCHECK(!old_method->IsIntrinsic());
- art::ArtMethod* new_obsolete_method = nullptr;
- auto obsolete_method_pair = obsolete_maps_->find(old_method);
- if (obsolete_method_pair == obsolete_maps_->end()) {
+ art::ArtMethod* new_obsolete_method = obsolete_maps_->FindObsoleteVersion(old_method);
+ if (new_obsolete_method == nullptr) {
// Create a new Obsolete Method and put it in the list.
art::Runtime* runtime = art::Runtime::Current();
art::ClassLinker* cl = runtime->GetClassLinker();
@@ -124,7 +189,7 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
DCHECK_EQ(new_obsolete_method->GetDeclaringClass(), old_method->GetDeclaringClass());
new_obsolete_method->SetIsObsolete();
new_obsolete_method->SetDontCompile();
- obsolete_maps_->insert({old_method, new_obsolete_method});
+ obsolete_maps_->RecordObsolete(old_method, new_obsolete_method);
// Update JIT Data structures to point to the new method.
art::jit::Jit* jit = art::Runtime::Current()->GetJit();
if (jit != nullptr) {
@@ -132,8 +197,6 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
// structures to keep track of the new obsolete method.
jit->GetCodeCache()->MoveObsoleteMethod(old_method, new_obsolete_method);
}
- } else {
- new_obsolete_method = obsolete_method_pair->second;
}
DCHECK(new_obsolete_method != nullptr);
SetMethod(new_obsolete_method);
@@ -147,9 +210,9 @@ class ObsoleteMethodStackVisitor : public art::StackVisitor {
// The set of all methods which could be obsoleted.
const std::unordered_set<art::ArtMethod*>& obsoleted_methods_;
// A map from the original to the newly allocated obsolete method for frames on this thread. The
- // values in this map must be added to the obsolete_methods_ (and obsolete_dex_caches_) fields of
- // the redefined classes ClassExt by the caller.
- std::unordered_map<art::ArtMethod*, art::ArtMethod*>* obsolete_maps_;
+ // values in this map are added to the obsolete_methods_ (and obsolete_dex_caches_) fields of
+  // the redefined class's ClassExt as the map is filled.
+ ObsoleteMap* obsolete_maps_;
};
jvmtiError Redefiner::IsModifiableClass(jvmtiEnv* env ATTRIBUTE_UNUSED,
@@ -426,11 +489,12 @@ art::mirror::ByteArray* Redefiner::ClassRedefinition::AllocateOrGetOriginalDexFi
}
struct CallbackCtx {
+ ObsoleteMap* obsolete_map;
art::LinearAlloc* allocator;
- std::unordered_map<art::ArtMethod*, art::ArtMethod*> obsolete_map;
std::unordered_set<art::ArtMethod*> obsolete_methods;
- explicit CallbackCtx(art::LinearAlloc* alloc) : allocator(alloc) {}
+ explicit CallbackCtx(ObsoleteMap* map, art::LinearAlloc* alloc)
+ : obsolete_map(map), allocator(alloc) {}
};
void DoAllocateObsoleteMethodsCallback(art::Thread* t, void* vdata) NO_THREAD_SAFETY_ANALYSIS {
@@ -438,7 +502,7 @@ void DoAllocateObsoleteMethodsCallback(art::Thread* t, void* vdata) NO_THREAD_SA
ObsoleteMethodStackVisitor::UpdateObsoleteFrames(t,
data->allocator,
data->obsolete_methods,
- &data->obsolete_map);
+ data->obsolete_map);
}
// This creates any ArtMethod* structures needed for obsolete methods and ensures that the stack is
@@ -449,9 +513,18 @@ void Redefiner::ClassRedefinition::FindAndAllocateObsoleteMethods(art::mirror::C
art::mirror::ClassExt* ext = art_klass->GetExtData();
CHECK(ext->GetObsoleteMethods() != nullptr);
art::ClassLinker* linker = driver_->runtime_->GetClassLinker();
- CallbackCtx ctx(linker->GetAllocatorForClassLoader(art_klass->GetClassLoader()));
+ // This holds pointers to the obsolete methods map fields which are updated as needed.
+ ObsoleteMap map(ext->GetObsoleteMethods(), ext->GetObsoleteDexCaches(), art_klass->GetDexCache());
+ CallbackCtx ctx(&map, linker->GetAllocatorForClassLoader(art_klass->GetClassLoader()));
// Add all the declared methods to the map
for (auto& m : art_klass->GetDeclaredMethods(art::kRuntimePointerSize)) {
+    // It is possible to simply filter out some methods that cannot really become obsolete, such
+    // as native methods, and keep their original (possibly optimized) implementations. We don't
+    // do this, however, since we would need to mark these functions (still in the class's
+    // declared_methods array) as obsolete so we will find the correct dex file to get meta-data
+    // from (for example about stack-frame size). Furthermore, we would be unable to get some useful
+    // error checking from the interpreter, which ensures we don't try to start executing obsolete
+    // methods.
ctx.obsolete_methods.insert(&m);
// TODO Allow this or check in IsModifiableClass.
DCHECK(!m.IsIntrinsic());
@@ -461,36 +534,6 @@ void Redefiner::ClassRedefinition::FindAndAllocateObsoleteMethods(art::mirror::C
art::ThreadList* list = art::Runtime::Current()->GetThreadList();
list->ForEach(DoAllocateObsoleteMethodsCallback, static_cast<void*>(&ctx));
}
- FillObsoleteMethodMap(art_klass, ctx.obsolete_map);
-}
-
-// Fills the obsolete method map in the art_klass's extData. This is so obsolete methods are able to
-// figure out their DexCaches.
-void Redefiner::ClassRedefinition::FillObsoleteMethodMap(
- art::mirror::Class* art_klass,
- const std::unordered_map<art::ArtMethod*, art::ArtMethod*>& obsoletes) {
- int32_t index = 0;
- art::mirror::ClassExt* ext_data = art_klass->GetExtData();
- art::mirror::PointerArray* obsolete_methods = ext_data->GetObsoleteMethods();
- art::mirror::ObjectArray<art::mirror::DexCache>* obsolete_dex_caches =
- ext_data->GetObsoleteDexCaches();
- int32_t num_method_slots = obsolete_methods->GetLength();
- // Find the first empty index.
- for (; index < num_method_slots; index++) {
- if (obsolete_methods->GetElementPtrSize<art::ArtMethod*>(
- index, art::kRuntimePointerSize) == nullptr) {
- break;
- }
- }
- // Make sure we have enough space.
- CHECK_GT(num_method_slots, static_cast<int32_t>(obsoletes.size() + index));
- CHECK(obsolete_dex_caches->Get(index) == nullptr);
- // Fill in the map.
- for (auto& obs : obsoletes) {
- obsolete_methods->SetElementPtrSize(index, obs.second, art::kRuntimePointerSize);
- obsolete_dex_caches->Set(index, art_klass->GetDexCache());
- index++;
- }
}
// Try and get the declared method. First try to get a virtual method then a direct method if that's
@@ -934,7 +977,7 @@ bool Redefiner::ClassRedefinition::FinishRemainingAllocations(
art::Handle<art::mirror::Object> dex_file_obj(hs.NewHandle(
ClassLoaderHelper::FindSourceDexFileObject(driver_->self_, loader)));
holder->SetJavaDexFile(klass_index, dex_file_obj.Get());
- if (dex_file_obj.Get() == nullptr) {
+ if (dex_file_obj == nullptr) {
// TODO Better error msg.
RecordFailure(ERR(INTERNAL), "Unable to find dex file!");
return false;
@@ -966,6 +1009,23 @@ bool Redefiner::ClassRedefinition::FinishRemainingAllocations(
return true;
}
+void Redefiner::ClassRedefinition::UnregisterBreakpoints() {
+ DCHECK(art::Dbg::IsDebuggerActive());
+ art::JDWP::JdwpState* state = art::Dbg::GetJdwpState();
+ if (state != nullptr) {
+ state->UnregisterLocationEventsOnClass(GetMirrorClass());
+ }
+}
+
+void Redefiner::UnregisterAllBreakpoints() {
+ if (LIKELY(!art::Dbg::IsDebuggerActive())) {
+ return;
+ }
+ for (Redefiner::ClassRedefinition& redef : redefinitions_) {
+ redef.UnregisterBreakpoints();
+ }
+}
+
bool Redefiner::CheckAllRedefinitionAreValid() {
for (Redefiner::ClassRedefinition& redef : redefinitions_) {
if (!redef.CheckRedefinitionIsValid()) {
@@ -1044,6 +1104,7 @@ jvmtiError Redefiner::Run() {
// cleaned up by the GC eventually.
return result_;
}
+ // At this point we can no longer fail without corrupting the runtime state.
int32_t counter = 0;
for (Redefiner::ClassRedefinition& redef : redefinitions_) {
if (holder.GetSourceClassLoader(counter) == nullptr) {
@@ -1051,6 +1112,7 @@ jvmtiError Redefiner::Run() {
}
counter++;
}
+ UnregisterAllBreakpoints();
// Disable GC and wait for it to be done if we are a moving GC. This is fine since we are done
// allocating so no deadlocks.
art::gc::Heap* heap = runtime_->GetHeap();
@@ -1083,9 +1145,7 @@ jvmtiError Redefiner::Run() {
holder.GetOriginalDexFileBytes(counter));
counter++;
}
- // TODO Verify the new Class.
// TODO Shrink the obsolete method maps if possible?
- // TODO find appropriate class loader.
// TODO Put this into a scoped thing.
runtime_->GetThreadList()->ResumeAll();
// Get back shared mutator lock as expected for return.
@@ -1190,13 +1250,13 @@ bool Redefiner::ClassRedefinition::EnsureClassAllocationsFinished() {
art::StackHandleScope<2> hs(driver_->self_);
art::Handle<art::mirror::Class> klass(hs.NewHandle(
driver_->self_->DecodeJObject(klass_)->AsClass()));
- if (klass.Get() == nullptr) {
+ if (klass == nullptr) {
RecordFailure(ERR(INVALID_CLASS), "Unable to decode class argument!");
return false;
}
// Allocate the classExt
art::Handle<art::mirror::ClassExt> ext(hs.NewHandle(klass->EnsureExtDataPresent(driver_->self_)));
- if (ext.Get() == nullptr) {
+ if (ext == nullptr) {
// No memory. Clear exception (it's not useful) and return error.
// TODO This doesn't need to be fatal. We could just not support obsolete methods after hitting
// this case.
diff --git a/runtime/openjdkjvmti/ti_redefine.h b/runtime/openjdkjvmti/ti_redefine.h
index 421d22ef4c..65ee2912e2 100644
--- a/runtime/openjdkjvmti/ti_redefine.h
+++ b/runtime/openjdkjvmti/ti_redefine.h
@@ -155,12 +155,6 @@ class Redefiner {
void FindAndAllocateObsoleteMethods(art::mirror::Class* art_klass)
REQUIRES(art::Locks::mutator_lock_);
- void FillObsoleteMethodMap(
- art::mirror::Class* art_klass,
- const std::unordered_map<art::ArtMethod*, art::ArtMethod*>& obsoletes)
- REQUIRES(art::Locks::mutator_lock_);
-
-
// Checks that the dex file contains only the single expected class and that the top-level class
// data has not been modified in an incompatible manner.
bool CheckClass() REQUIRES_SHARED(art::Locks::mutator_lock_);
@@ -207,6 +201,8 @@ class Redefiner {
void ReleaseDexFile() REQUIRES_SHARED(art::Locks::mutator_lock_);
+ void UnregisterBreakpoints() REQUIRES_SHARED(art::Locks::mutator_lock_);
+
private:
Redefiner* driver_;
jclass klass_;
@@ -250,6 +246,7 @@ class Redefiner {
bool FinishAllRemainingAllocations(RedefinitionDataHolder& holder)
REQUIRES_SHARED(art::Locks::mutator_lock_);
void ReleaseAllDexFiles() REQUIRES_SHARED(art::Locks::mutator_lock_);
+ void UnregisterAllBreakpoints() REQUIRES_SHARED(art::Locks::mutator_lock_);
void RecordFailure(jvmtiError result, const std::string& class_sig, const std::string& error_msg);
void RecordFailure(jvmtiError result, const std::string& error_msg) {
diff --git a/runtime/openjdkjvmti/ti_threadgroup.cc b/runtime/openjdkjvmti/ti_threadgroup.cc
index e63ce6576a..142387433e 100644
--- a/runtime/openjdkjvmti/ti_threadgroup.cc
+++ b/runtime/openjdkjvmti/ti_threadgroup.cc
@@ -155,7 +155,7 @@ jvmtiError ThreadGroupUtil::GetThreadGroupInfo(jvmtiEnv* env,
static bool IsInDesiredThreadGroup(art::Handle<art::mirror::Object> desired_thread_group,
art::ObjPtr<art::mirror::Object> peer)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
- CHECK(desired_thread_group.Get() != nullptr);
+ CHECK(desired_thread_group != nullptr);
art::ArtField* thread_group_field =
art::jni::DecodeArtField(art::WellKnownClasses::java_lang_Thread_group);
@@ -167,7 +167,7 @@ static bool IsInDesiredThreadGroup(art::Handle<art::mirror::Object> desired_thre
static void GetThreads(art::Handle<art::mirror::Object> thread_group,
std::vector<art::ObjPtr<art::mirror::Object>>* thread_peers)
REQUIRES_SHARED(art::Locks::mutator_lock_) REQUIRES(!art::Locks::thread_list_lock_) {
- CHECK(thread_group.Get() != nullptr);
+ CHECK(thread_group != nullptr);
art::MutexLock mu(art::Thread::Current(), *art::Locks::thread_list_lock_);
for (art::Thread* t : art::Runtime::Current()->GetThreadList()->GetList()) {
@@ -187,7 +187,7 @@ static void GetThreads(art::Handle<art::mirror::Object> thread_group,
static void GetChildThreadGroups(art::Handle<art::mirror::Object> thread_group,
std::vector<art::ObjPtr<art::mirror::Object>>* thread_groups)
REQUIRES_SHARED(art::Locks::mutator_lock_) {
- CHECK(thread_group.Get() != nullptr);
+ CHECK(thread_group != nullptr);
// Get the ThreadGroup[] "groups" out of this thread group...
art::ArtField* groups_field =
diff --git a/runtime/proxy_test.cc b/runtime/proxy_test.cc
index 1292a819a3..5748475163 100644
--- a/runtime/proxy_test.cc
+++ b/runtime/proxy_test.cc
@@ -114,8 +114,8 @@ TEST_F(ProxyTest, ProxyClassHelper) {
class_linker_->FindClass(soa.Self(), "LInterfaces$I;", class_loader)));
Handle<mirror::Class> J(hs.NewHandle(
class_linker_->FindClass(soa.Self(), "LInterfaces$J;", class_loader)));
- ASSERT_TRUE(I.Get() != nullptr);
- ASSERT_TRUE(J.Get() != nullptr);
+ ASSERT_TRUE(I != nullptr);
+ ASSERT_TRUE(J != nullptr);
std::vector<mirror::Class*> interfaces;
interfaces.push_back(I.Get());
@@ -123,7 +123,7 @@ TEST_F(ProxyTest, ProxyClassHelper) {
Handle<mirror::Class> proxy_class(hs.NewHandle(
GenerateProxyClass(soa, jclass_loader, "$Proxy1234", interfaces)));
interfaces.clear(); // Don't least possibly stale objects in the array as good practice.
- ASSERT_TRUE(proxy_class.Get() != nullptr);
+ ASSERT_TRUE(proxy_class != nullptr);
ASSERT_TRUE(proxy_class->IsProxyClass());
ASSERT_TRUE(proxy_class->IsInitialized());
@@ -148,8 +148,8 @@ TEST_F(ProxyTest, ProxyFieldHelper) {
class_linker_->FindClass(soa.Self(), "LInterfaces$I;", class_loader)));
Handle<mirror::Class> J(hs.NewHandle(
class_linker_->FindClass(soa.Self(), "LInterfaces$J;", class_loader)));
- ASSERT_TRUE(I.Get() != nullptr);
- ASSERT_TRUE(J.Get() != nullptr);
+ ASSERT_TRUE(I != nullptr);
+ ASSERT_TRUE(J != nullptr);
Handle<mirror::Class> proxyClass;
{
@@ -159,7 +159,7 @@ TEST_F(ProxyTest, ProxyFieldHelper) {
proxyClass = hs.NewHandle(GenerateProxyClass(soa, jclass_loader, "$Proxy1234", interfaces));
}
- ASSERT_TRUE(proxyClass.Get() != nullptr);
+ ASSERT_TRUE(proxyClass != nullptr);
ASSERT_TRUE(proxyClass->IsProxyClass());
ASSERT_TRUE(proxyClass->IsInitialized());
@@ -171,10 +171,10 @@ TEST_F(ProxyTest, ProxyFieldHelper) {
Handle<mirror::Class> interfacesFieldClass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Class;")));
- ASSERT_TRUE(interfacesFieldClass.Get() != nullptr);
+ ASSERT_TRUE(interfacesFieldClass != nullptr);
Handle<mirror::Class> throwsFieldClass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[[Ljava/lang/Class;")));
- ASSERT_TRUE(throwsFieldClass.Get() != nullptr);
+ ASSERT_TRUE(throwsFieldClass != nullptr);
// Test "Class[] interfaces" field.
ArtField* field = &static_fields->At(0);
@@ -208,10 +208,10 @@ TEST_F(ProxyTest, CheckArtMirrorFieldsOfProxyStaticFields) {
proxyClass1 = hs.NewHandle(GenerateProxyClass(soa, jclass_loader, "$Proxy1", interfaces));
}
- ASSERT_TRUE(proxyClass0.Get() != nullptr);
+ ASSERT_TRUE(proxyClass0 != nullptr);
ASSERT_TRUE(proxyClass0->IsProxyClass());
ASSERT_TRUE(proxyClass0->IsInitialized());
- ASSERT_TRUE(proxyClass1.Get() != nullptr);
+ ASSERT_TRUE(proxyClass1 != nullptr);
ASSERT_TRUE(proxyClass1->IsProxyClass());
ASSERT_TRUE(proxyClass1->IsInitialized());
diff --git a/runtime/reference_table_test.cc b/runtime/reference_table_test.cc
index 9523e92d7c..4ccfb6d83b 100644
--- a/runtime/reference_table_test.cc
+++ b/runtime/reference_table_test.cc
@@ -48,12 +48,12 @@ static mirror::Object* CreateWeakReference(mirror::Object* referent)
class_linker->FindClass(self,
"Ljava/lang/ref/WeakReference;",
ScopedNullHandle<mirror::ClassLoader>())));
- CHECK(h_ref_class.Get() != nullptr);
+ CHECK(h_ref_class != nullptr);
CHECK(class_linker->EnsureInitialized(self, h_ref_class, true, true));
Handle<mirror::Object> h_ref_instance(scope.NewHandle<mirror::Object>(
h_ref_class->AllocObject(self)));
- CHECK(h_ref_instance.Get() != nullptr);
+ CHECK(h_ref_instance != nullptr);
ArtMethod* constructor = h_ref_class->FindDeclaredDirectMethod(
"<init>", "(Ljava/lang/Object;)V", class_linker->GetImagePointerSize());
diff --git a/runtime/reflection.cc b/runtime/reflection.cc
index a2b4cb37a9..3c64d40720 100644
--- a/runtime/reflection.cc
+++ b/runtime/reflection.cc
@@ -231,8 +231,8 @@ class ArgArray {
hs.NewHandle<mirror::ObjectArray<mirror::Object>>(raw_args));
for (size_t i = 1, args_offset = 0; i < shorty_len_; ++i, ++args_offset) {
arg.Assign(args->Get(args_offset));
- if (((shorty_[i] == 'L') && (arg.Get() != nullptr)) ||
- ((arg.Get() == nullptr && shorty_[i] != 'L'))) {
+ if (((shorty_[i] == 'L') && (arg != nullptr)) ||
+ ((arg == nullptr && shorty_[i] != 'L'))) {
// TODO: The method's parameter's type must have been previously resolved, yet
// we've seen cases where it's not b/34440020.
ObjPtr<mirror::Class> dst_class(
@@ -242,7 +242,7 @@ class ArgArray {
CHECK(self->IsExceptionPending());
return false;
}
- if (UNLIKELY(arg.Get() == nullptr || !arg->InstanceOf(dst_class))) {
+ if (UNLIKELY(arg == nullptr || !arg->InstanceOf(dst_class))) {
ThrowIllegalArgumentException(
StringPrintf("method %s argument %zd has type %s, got %s",
m->PrettyMethod(false).c_str(),
@@ -254,13 +254,13 @@ class ArgArray {
}
#define DO_FIRST_ARG(match_descriptor, get_fn, append) { \
- if (LIKELY(arg.Get() != nullptr && \
+ if (LIKELY(arg != nullptr && \
arg->GetClass()->DescriptorEquals(match_descriptor))) { \
ArtField* primitive_field = arg->GetClass()->GetInstanceField(0); \
append(primitive_field-> get_fn(arg.Get()));
#define DO_ARG(match_descriptor, get_fn, append) \
- } else if (LIKELY(arg.Get() != nullptr && \
+ } else if (LIKELY(arg != nullptr && \
arg->GetClass<>()->DescriptorEquals(match_descriptor))) { \
ArtField* primitive_field = arg->GetClass()->GetInstanceField(0); \
append(primitive_field-> get_fn(arg.Get()));
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 9609bee022..f8f3d766c0 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -95,6 +95,7 @@
#include "mirror/field.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
+#include "mirror/method_handles_lookup.h"
#include "mirror/method_type.h"
#include "mirror/stack_trace_element.h"
#include "mirror/throwable.h"
@@ -1715,6 +1716,7 @@ void Runtime::VisitConstantRoots(RootVisitor* visitor) {
mirror::Field::VisitRoots(visitor);
mirror::MethodType::VisitRoots(visitor);
mirror::MethodHandleImpl::VisitRoots(visitor);
+ mirror::MethodHandlesLookup::VisitRoots(visitor);
mirror::EmulatedStackFrame::VisitRoots(visitor);
mirror::ClassExt::VisitRoots(visitor);
// Visit all the primitive array types classes.
diff --git a/runtime/runtime_callbacks_test.cc b/runtime/runtime_callbacks_test.cc
index f1e78b40e0..abe99e0d50 100644
--- a/runtime/runtime_callbacks_test.cc
+++ b/runtime/runtime_callbacks_test.cc
@@ -294,7 +294,7 @@ TEST_F(ClassLoadCallbackRuntimeCallbacksTest, ClassLoadCallback) {
const char* descriptor_y = "LY;";
Handle<mirror::Class> h_Y(
hs.NewHandle(class_linker_->FindClass(soa.Self(), descriptor_y, class_loader)));
- ASSERT_TRUE(h_Y.Get() != nullptr);
+ ASSERT_TRUE(h_Y != nullptr);
bool expect1 = Expect({ "PreDefine:LY; <art-gtest-XandY.jar>",
"PreDefine:LX; <art-gtest-XandY.jar>",
diff --git a/runtime/stack.cc b/runtime/stack.cc
index d7ba1d75d8..51a24e4e01 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -874,9 +874,13 @@ void StackVisitor::WalkStack(bool include_transitions) {
CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
<< " Found: " << ArtMethod::PrettyMethod(GetMethod());
} else {
- CHECK_EQ(instrumentation_frame.method_, GetMethod())
- << "Expected: " << ArtMethod::PrettyMethod(instrumentation_frame.method_)
- << " Found: " << ArtMethod::PrettyMethod(GetMethod());
+ // Instrumentation generally doesn't distinguish between a method's obsolete and
+ // non-obsolete version.
+ CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
+ GetMethod()->GetNonObsoleteMethod())
+ << "Expected: "
+ << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
+ << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
}
if (num_frames_ != 0) {
// Check agreement of frame Ids only if num_frames_ is computed to avoid infinite
@@ -903,7 +907,7 @@ void StackVisitor::WalkStack(bool include_transitions) {
<< " native=" << method->IsNative()
<< std::noboolalpha
<< " entrypoints=" << method->GetEntryPointFromQuickCompiledCode()
- << "," << method->GetEntryPointFromJni()
+ << "," << (method->IsNative() ? method->GetEntryPointFromJni() : nullptr)
<< " next=" << *cur_quick_frame_;
}
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 61d6a5847c..f7a64026b7 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -747,6 +747,7 @@ class StackMapEncoding {
return total_bit_size_;
}
+ // Encode the encoding into the vector.
template<typename Vector>
void Encode(Vector* dest) const {
static_assert(alignof(StackMapEncoding) == 1, "Should not require alignment");
@@ -754,6 +755,7 @@ class StackMapEncoding {
dest->insert(dest->end(), ptr, ptr + sizeof(*this));
}
+ // Decode the encoding from a pointer, updates the pointer.
void Decode(const uint8_t** ptr) {
*this = *reinterpret_cast<const StackMapEncoding*>(*ptr);
*ptr += sizeof(*this);
@@ -924,6 +926,7 @@ class InlineInfoEncoding {
void Dump(VariableIndentationOutputStream* vios) const;
+ // Encode the encoding into the vector.
template<typename Vector>
void Encode(Vector* dest) const {
static_assert(alignof(InlineInfoEncoding) == 1, "Should not require alignment");
@@ -931,6 +934,7 @@ class InlineInfoEncoding {
dest->insert(dest->end(), ptr, ptr + sizeof(*this));
}
+ // Decode the encoding from a pointer, updates the pointer.
void Decode(const uint8_t** ptr) {
*this = *reinterpret_cast<const InlineInfoEncoding*>(*ptr);
*ptr += sizeof(*this);
@@ -1171,6 +1175,7 @@ struct CodeInfoEncoding {
ComputeTableOffsets();
}
+ // Compress is not const since it calculates cache_header_size. This is used by PrepareForFillIn.
template<typename Vector>
void Compress(Vector* dest) {
dex_register_map.Encode(dest);
@@ -1210,9 +1215,9 @@ struct CodeInfoEncoding {
private:
// Computed fields (not serialized).
- // Header size in bytes.
+ // Header size in bytes, cached to avoid needing to re-decoding the encoding in HeaderSize.
uint32_t cache_header_size = kInvalidSize;
- // Non header size in bytes.
+ // Non header size in bytes, cached to avoid needing to re-decoding the encoding in NonHeaderSize.
uint32_t cache_non_header_size = kInvalidSize;
};
@@ -1221,7 +1226,13 @@ struct CodeInfoEncoding {
* The information is of the form:
*
* [CodeInfoEncoding, DexRegisterMap+, DexLocationCatalog+, StackMap+, RegisterMask+, StackMask+,
- * DexRegisterMap+, InlineInfo*]
+ * InlineInfo*]
+ *
+ * where CodeInfoEncoding is of the form:
+ *
+ * [ByteSizedTable(dex_register_map), ByteSizedTable(location_catalog),
+ * BitEncodingTable<StackMapEncoding>, BitEncodingTable<BitRegionEncoding>,
+ * BitEncodingTable<BitRegionEncoding>, BitEncodingTable<InlineInfoEncoding>]
*/
class CodeInfo {
public:
@@ -1331,7 +1342,9 @@ class CodeInfo {
}
InlineInfo GetInlineInfo(size_t index, const CodeInfoEncoding& encoding) const {
- // Since we do not know the depth, we just return the whole remaining map.
+ // Since we do not know the depth, we just return the whole remaining map. The caller may
+ // access the inline info for arbitrary depths. To return the precise inline info we would need
+ // to count the depth before returning.
// TODO: Clean this up.
const size_t bit_offset = encoding.inline_info.bit_offset +
index * encoding.inline_info.encoding.BitSize();
diff --git a/runtime/thread-inl.h b/runtime/thread-inl.h
index c92305f373..8d946262e8 100644
--- a/runtime/thread-inl.h
+++ b/runtime/thread-inl.h
@@ -80,7 +80,34 @@ inline void Thread::CheckSuspend() {
}
}
-inline void Thread::CheckEmptyCheckpoint() {
+inline void Thread::CheckEmptyCheckpointFromWeakRefAccess(BaseMutex* cond_var_mutex) {
+ Thread* self = Thread::Current();
+ DCHECK_EQ(self, this);
+ for (;;) {
+ if (ReadFlag(kEmptyCheckpointRequest)) {
+ RunEmptyCheckpoint();
+ // Check we hold only an expected mutex when accessing weak ref.
+ if (kIsDebugBuild) {
+ for (int i = kLockLevelCount - 1; i >= 0; --i) {
+ BaseMutex* held_mutex = self->GetHeldMutex(static_cast<LockLevel>(i));
+ if (held_mutex != nullptr &&
+ held_mutex != Locks::mutator_lock_ &&
+ held_mutex != cond_var_mutex) {
+ std::vector<BaseMutex*>& expected_mutexes = Locks::expected_mutexes_on_weak_ref_access_;
+ CHECK(std::find(expected_mutexes.begin(), expected_mutexes.end(), held_mutex) !=
+ expected_mutexes.end())
+ << "Holding unexpected mutex " << held_mutex->GetName()
+ << " when accessing weak ref";
+ }
+ }
+ }
+ } else {
+ break;
+ }
+ }
+}
+
+inline void Thread::CheckEmptyCheckpointFromMutex() {
DCHECK_EQ(Thread::Current(), this);
for (;;) {
if (ReadFlag(kEmptyCheckpointRequest)) {
diff --git a/runtime/thread.cc b/runtime/thread.cc
index eba8975754..7ee0cd1d3f 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -874,7 +874,7 @@ void Thread::CreatePeer(const char* name, bool as_daemon, jobject thread_group)
ScopedObjectAccess soa(self);
StackHandleScope<1> hs(self);
MutableHandle<mirror::String> peer_thread_name(hs.NewHandle(GetThreadName()));
- if (peer_thread_name.Get() == nullptr) {
+ if (peer_thread_name == nullptr) {
// The Thread constructor should have set the Thread.name to a
// non-null value. However, because we can run without code
// available (in the compiler, in tests), we manually assign the
@@ -887,7 +887,7 @@ void Thread::CreatePeer(const char* name, bool as_daemon, jobject thread_group)
peer_thread_name.Assign(GetThreadName());
}
// 'thread_name' may have been null, so don't trust 'peer_thread_name' to be non-null.
- if (peer_thread_name.Get() != nullptr) {
+ if (peer_thread_name != nullptr) {
SetThreadName(peer_thread_name->ToModifiedUtf8().c_str());
}
}
@@ -2284,7 +2284,7 @@ class BuildInternalStackTraceVisitor : public StackVisitor {
Handle<mirror::ObjectArray<mirror::Object>> trace(
hs.NewHandle(
mirror::ObjectArray<mirror::Object>::Alloc(hs.Self(), array_class, depth + 1)));
- if (trace.Get() == nullptr) {
+ if (trace == nullptr) {
// Acquire uninterruptible_ in all paths.
self_->StartAssertNoThreadSuspension("Building internal stack trace");
self_->AssertPendingOOMException();
@@ -2479,14 +2479,14 @@ jobjectArray Thread::InternalStackTraceToStackTraceElementArray(
std::string class_name(PrettyDescriptor(descriptor));
class_name_object.Assign(
mirror::String::AllocFromModifiedUtf8(soa.Self(), class_name.c_str()));
- if (class_name_object.Get() == nullptr) {
+ if (class_name_object == nullptr) {
soa.Self()->AssertPendingOOMException();
return nullptr;
}
const char* source_file = method->GetDeclaringClassSourceFile();
if (source_file != nullptr) {
source_name_object.Assign(mirror::String::AllocFromModifiedUtf8(soa.Self(), source_file));
- if (source_name_object.Get() == nullptr) {
+ if (source_name_object == nullptr) {
soa.Self()->AssertPendingOOMException();
return nullptr;
}
@@ -2496,7 +2496,7 @@ jobjectArray Thread::InternalStackTraceToStackTraceElementArray(
CHECK(method_name != nullptr);
Handle<mirror::String> method_name_object(
hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), method_name)));
- if (method_name_object.Get() == nullptr) {
+ if (method_name_object == nullptr) {
return nullptr;
}
ObjPtr<mirror::StackTraceElement> obj =mirror::StackTraceElement::Alloc(soa.Self(),
@@ -2554,7 +2554,7 @@ void Thread::ThrowNewWrappedException(const char* exception_class_descriptor,
auto* cl = runtime->GetClassLinker();
Handle<mirror::Class> exception_class(
hs.NewHandle(cl->FindClass(this, exception_class_descriptor, class_loader)));
- if (UNLIKELY(exception_class.Get() == nullptr)) {
+ if (UNLIKELY(exception_class == nullptr)) {
CHECK(IsExceptionPending());
LOG(ERROR) << "No exception class " << PrettyDescriptor(exception_class_descriptor);
return;
@@ -2570,7 +2570,7 @@ void Thread::ThrowNewWrappedException(const char* exception_class_descriptor,
hs.NewHandle(ObjPtr<mirror::Throwable>::DownCast(exception_class->AllocObject(this))));
// If we couldn't allocate the exception, throw the pre-allocated out of memory exception.
- if (exception.Get() == nullptr) {
+ if (exception == nullptr) {
SetException(Runtime::Current()->GetPreAllocatedOutOfMemoryError());
return;
}
diff --git a/runtime/thread.h b/runtime/thread.h
index 5dd6ae1dc2..e500e0b9e4 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -176,7 +176,8 @@ class Thread {
void CheckSuspend() REQUIRES_SHARED(Locks::mutator_lock_);
// Process a pending empty checkpoint if pending.
- void CheckEmptyCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
+ void CheckEmptyCheckpointFromWeakRefAccess(BaseMutex* cond_var_mutex);
+ void CheckEmptyCheckpointFromMutex();
static Thread* FromManagedThread(const ScopedObjectAccessAlreadyRunnable& ts,
ObjPtr<mirror::Object> thread_peer)
diff --git a/runtime/thread_list.cc b/runtime/thread_list.cc
index df8acc37a2..caed36936a 100644
--- a/runtime/thread_list.cc
+++ b/runtime/thread_list.cc
@@ -379,13 +379,15 @@ size_t ThreadList::RunCheckpoint(Closure* checkpoint_function, Closure* callback
return count;
}
-size_t ThreadList::RunEmptyCheckpoint(std::vector<uint32_t>& runnable_thread_ids) {
+void ThreadList::RunEmptyCheckpoint() {
Thread* self = Thread::Current();
Locks::mutator_lock_->AssertNotExclusiveHeld(self);
Locks::thread_list_lock_->AssertNotHeld(self);
Locks::thread_suspend_count_lock_->AssertNotHeld(self);
-
+ std::vector<uint32_t> runnable_thread_ids;
size_t count = 0;
+ Barrier* barrier = empty_checkpoint_barrier_.get();
+ barrier->Init(self, 0);
{
MutexLock mu(self, *Locks::thread_list_lock_);
MutexLock mu2(self, *Locks::thread_suspend_count_lock_);
@@ -415,8 +417,72 @@ size_t ThreadList::RunEmptyCheckpoint(std::vector<uint32_t>& runnable_thread_ids
// checkpoint request. Otherwise we will hang as they are blocking in the kRunnable state.
Runtime::Current()->GetHeap()->GetReferenceProcessor()->BroadcastForSlowPath(self);
Runtime::Current()->BroadcastForNewSystemWeaks(/*broadcast_for_checkpoint*/true);
-
- return count;
+ {
+ ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
+ uint64_t total_wait_time = 0;
+ bool first_iter = true;
+ while (true) {
+ // Wake up the runnable threads blocked on the mutexes that another thread, which is blocked
+ // on a weak ref access, holds (indirectly blocking for weak ref access through another thread
+ // and a mutex.) This needs to be done periodically because the thread may be preempted
+ // between the CheckEmptyCheckpointFromMutex call and the subsequent futex wait in
+ // Mutex::ExclusiveLock, etc. when the wakeup via WakeupToRespondToEmptyCheckpoint
+ // arrives. This could cause a *very rare* deadlock, if not repeated. Most of the cases are
+ // handled in the first iteration.
+ for (BaseMutex* mutex : Locks::expected_mutexes_on_weak_ref_access_) {
+ mutex->WakeupToRespondToEmptyCheckpoint();
+ }
+ static constexpr uint64_t kEmptyCheckpointPeriodicTimeoutMs = 100; // 100ms
+ static constexpr uint64_t kEmptyCheckpointTotalTimeoutMs = 600 * 1000; // 10 minutes.
+ size_t barrier_count = first_iter ? count : 0;
+ first_iter = false; // Don't add to the barrier count from the second iteration on.
+ bool timed_out = barrier->Increment(self, barrier_count, kEmptyCheckpointPeriodicTimeoutMs);
+ if (!timed_out) {
+ break; // Success
+ }
+ // This is a very rare case.
+ total_wait_time += kEmptyCheckpointPeriodicTimeoutMs;
+ if (kIsDebugBuild && total_wait_time > kEmptyCheckpointTotalTimeoutMs) {
+ std::ostringstream ss;
+ ss << "Empty checkpoint timeout\n";
+ ss << "Barrier count " << barrier->GetCount(self) << "\n";
+ ss << "Runnable thread IDs";
+ for (uint32_t tid : runnable_thread_ids) {
+ ss << " " << tid;
+ }
+ ss << "\n";
+ Locks::mutator_lock_->Dump(ss);
+ ss << "\n";
+ LOG(FATAL_WITHOUT_ABORT) << ss.str();
+ // Some threads in 'runnable_thread_ids' are probably stuck. Try to dump their stacks.
+ // Avoid using ThreadList::Dump() initially because it is likely to get stuck as well.
+ {
+ ScopedObjectAccess soa(self);
+ MutexLock mu1(self, *Locks::thread_list_lock_);
+ for (Thread* thread : GetList()) {
+ uint32_t tid = thread->GetThreadId();
+ bool is_in_runnable_thread_ids =
+ std::find(runnable_thread_ids.begin(), runnable_thread_ids.end(), tid) !=
+ runnable_thread_ids.end();
+ if (is_in_runnable_thread_ids &&
+ thread->ReadFlag(kEmptyCheckpointRequest)) {
+ // Found a runnable thread that hasn't responded to the empty checkpoint request.
+ // Assume it's stuck and safe to dump its stack.
+ thread->Dump(LOG_STREAM(FATAL_WITHOUT_ABORT),
+ /*dump_native_stack*/ true,
+ /*backtrace_map*/ nullptr,
+ /*force_dump_stack*/ true);
+ }
+ }
+ }
+ LOG(FATAL_WITHOUT_ABORT)
+ << "Dumped runnable threads that haven't responded to empty checkpoint.";
+ // Now use ThreadList::Dump() to dump more threads, noting it may get stuck.
+ Dump(LOG_STREAM(FATAL_WITHOUT_ABORT));
+ LOG(FATAL) << "Dumped all threads.";
+ }
+ }
+ }
}
// Request that a checkpoint function be run on all active (non-suspended)
diff --git a/runtime/thread_list.h b/runtime/thread_list.h
index b60fca1fdc..70917eb0f7 100644
--- a/runtime/thread_list.h
+++ b/runtime/thread_list.h
@@ -109,9 +109,7 @@ class ThreadList {
// in-flight mutator heap access (eg. a read barrier.) Runnable threads will respond by
// decrementing the empty checkpoint barrier count. This works even when the weak ref access is
// disabled. Only one concurrent use is currently supported.
- // In debug build, runnable_thread_ids will be populated with the thread IDS of the runnable
- // thread to wait for.
- size_t RunEmptyCheckpoint(std::vector<uint32_t>& runnable_thread_ids)
+ void RunEmptyCheckpoint()
REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
size_t RunCheckpointOnRunnableThreads(Closure* checkpoint_function)
diff --git a/runtime/transaction_test.cc b/runtime/transaction_test.cc
index a43c967092..97c1228038 100644
--- a/runtime/transaction_test.cc
+++ b/runtime/transaction_test.cc
@@ -35,7 +35,7 @@ class TransactionTest : public CommonRuntimeTest {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
// Load and initialize java.lang.ExceptionInInitializerError and the exception class used
// to abort transaction so they can be thrown during class initialization if the transaction
@@ -43,26 +43,26 @@ class TransactionTest : public CommonRuntimeTest {
MutableHandle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(),
"Ljava/lang/ExceptionInInitializerError;")));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
class_linker_->EnsureInitialized(soa.Self(), h_klass, true, true);
ASSERT_TRUE(h_klass->IsInitialized());
h_klass.Assign(class_linker_->FindSystemClass(soa.Self(),
Transaction::kAbortExceptionSignature));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
class_linker_->EnsureInitialized(soa.Self(), h_klass, true, true);
ASSERT_TRUE(h_klass->IsInitialized());
// Load and verify utility class.
h_klass.Assign(class_linker_->FindClass(soa.Self(), "LTransaction$AbortHelperClass;",
class_loader));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
class_linker_->VerifyClass(soa.Self(), h_klass);
ASSERT_TRUE(h_klass->IsVerified());
// Load and verify tested class.
h_klass.Assign(class_linker_->FindClass(soa.Self(), tested_class_signature, class_loader));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
class_linker_->VerifyClass(soa.Self(), h_klass);
ASSERT_TRUE(h_klass->IsVerified());
@@ -95,12 +95,12 @@ TEST_F(TransactionTest, Object_class) {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
Transaction transaction;
Runtime::Current()->EnterTransactionMode(&transaction);
Handle<mirror::Object> h_obj(hs.NewHandle(h_klass->AllocObject(soa.Self())));
- ASSERT_TRUE(h_obj.Get() != nullptr);
+ ASSERT_TRUE(h_obj != nullptr);
ASSERT_EQ(h_obj->GetClass(), h_klass.Get());
Runtime::Current()->ExitTransactionMode();
@@ -115,9 +115,9 @@ TEST_F(TransactionTest, Object_monitor) {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
Handle<mirror::Object> h_obj(hs.NewHandle(h_klass->AllocObject(soa.Self())));
- ASSERT_TRUE(h_obj.Get() != nullptr);
+ ASSERT_TRUE(h_obj != nullptr);
ASSERT_EQ(h_obj->GetClass(), h_klass.Get());
// Lock object's monitor outside the transaction.
@@ -144,7 +144,7 @@ TEST_F(TransactionTest, Array_length) {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
constexpr int32_t kArraySize = 2;
@@ -157,7 +157,7 @@ TEST_F(TransactionTest, Array_length) {
mirror::Array::Alloc<true>(soa.Self(), h_klass.Get(), kArraySize,
h_klass->GetComponentSizeShift(),
Runtime::Current()->GetHeap()->GetCurrentAllocator())));
- ASSERT_TRUE(h_obj.Get() != nullptr);
+ ASSERT_TRUE(h_obj != nullptr);
ASSERT_EQ(h_obj->GetClass(), h_klass.Get());
Runtime::Current()->ExitTransactionMode();
@@ -172,11 +172,11 @@ TEST_F(TransactionTest, StaticFieldsTest) {
StackHandleScope<4> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(LoadDex("Transaction"))));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LStaticFieldsTest;", class_loader)));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
bool success = class_linker_->EnsureInitialized(soa.Self(), h_klass, true, true);
ASSERT_TRUE(success);
ASSERT_TRUE(h_klass->IsInitialized());
@@ -232,9 +232,9 @@ TEST_F(TransactionTest, StaticFieldsTest) {
// Create a java.lang.Object instance to set objectField.
Handle<mirror::Class> object_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
- ASSERT_TRUE(object_klass.Get() != nullptr);
+ ASSERT_TRUE(object_klass != nullptr);
Handle<mirror::Object> h_obj(hs.NewHandle(h_klass->AllocObject(soa.Self())));
- ASSERT_TRUE(h_obj.Get() != nullptr);
+ ASSERT_TRUE(h_obj != nullptr);
ASSERT_EQ(h_obj->GetClass(), h_klass.Get());
// Modify fields inside transaction then rollback changes.
@@ -270,11 +270,11 @@ TEST_F(TransactionTest, InstanceFieldsTest) {
StackHandleScope<5> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(LoadDex("Transaction"))));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LInstanceFieldsTest;", class_loader)));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
bool success = class_linker_->EnsureInitialized(soa.Self(), h_klass, true, true);
ASSERT_TRUE(success);
ASSERT_TRUE(h_klass->IsInitialized());
@@ -282,7 +282,7 @@ TEST_F(TransactionTest, InstanceFieldsTest) {
// Allocate an InstanceFieldTest object.
Handle<mirror::Object> h_instance(hs.NewHandle(h_klass->AllocObject(soa.Self())));
- ASSERT_TRUE(h_instance.Get() != nullptr);
+ ASSERT_TRUE(h_instance != nullptr);
// Lookup fields.
ArtField* booleanField = h_klass->FindDeclaredInstanceField("booleanField", "Z");
@@ -334,9 +334,9 @@ TEST_F(TransactionTest, InstanceFieldsTest) {
// Create a java.lang.Object instance to set objectField.
Handle<mirror::Class> object_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
- ASSERT_TRUE(object_klass.Get() != nullptr);
+ ASSERT_TRUE(object_klass != nullptr);
Handle<mirror::Object> h_obj(hs.NewHandle(h_klass->AllocObject(soa.Self())));
- ASSERT_TRUE(h_obj.Get() != nullptr);
+ ASSERT_TRUE(h_obj != nullptr);
ASSERT_EQ(h_obj->GetClass(), h_klass.Get());
// Modify fields inside transaction then rollback changes.
@@ -372,11 +372,11 @@ TEST_F(TransactionTest, StaticArrayFieldsTest) {
StackHandleScope<4> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(LoadDex("Transaction"))));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LStaticArrayFieldsTest;", class_loader)));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
bool success = class_linker_->EnsureInitialized(soa.Self(), h_klass, true, true);
ASSERT_TRUE(success);
ASSERT_TRUE(h_klass->IsInitialized());
@@ -451,9 +451,9 @@ TEST_F(TransactionTest, StaticArrayFieldsTest) {
// Create a java.lang.Object instance to set objectField.
Handle<mirror::Class> object_klass(
hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
- ASSERT_TRUE(object_klass.Get() != nullptr);
+ ASSERT_TRUE(object_klass != nullptr);
Handle<mirror::Object> h_obj(hs.NewHandle(h_klass->AllocObject(soa.Self())));
- ASSERT_TRUE(h_obj.Get() != nullptr);
+ ASSERT_TRUE(h_obj != nullptr);
ASSERT_EQ(h_obj->GetClass(), h_klass.Get());
// Modify fields inside transaction then rollback changes.
@@ -489,15 +489,15 @@ TEST_F(TransactionTest, ResolveString) {
StackHandleScope<3> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(LoadDex("Transaction"))));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LTransaction$ResolveString;",
class_loader)));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(h_klass->GetDexCache()));
- ASSERT_TRUE(h_dex_cache.Get() != nullptr);
+ ASSERT_TRUE(h_dex_cache != nullptr);
const DexFile* const dex_file = h_dex_cache->GetDexFile();
ASSERT_TRUE(dex_file != nullptr);
@@ -538,12 +538,12 @@ TEST_F(TransactionTest, EmptyClass) {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(LoadDex("Transaction"))));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LTransaction$EmptyStatic;",
class_loader)));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
class_linker_->VerifyClass(soa.Self(), h_klass);
ASSERT_TRUE(h_klass->IsVerified());
@@ -562,12 +562,12 @@ TEST_F(TransactionTest, StaticFieldClass) {
StackHandleScope<2> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(LoadDex("Transaction"))));
- ASSERT_TRUE(class_loader.Get() != nullptr);
+ ASSERT_TRUE(class_loader != nullptr);
Handle<mirror::Class> h_klass(
hs.NewHandle(class_linker_->FindClass(soa.Self(), "LTransaction$StaticFieldClass;",
class_loader)));
- ASSERT_TRUE(h_klass.Get() != nullptr);
+ ASSERT_TRUE(h_klass != nullptr);
class_linker_->VerifyClass(soa.Self(), h_klass);
ASSERT_TRUE(h_klass->IsVerified());
diff --git a/runtime/utils.h b/runtime/utils.h
index 67438b5881..96e5bfa8ec 100644
--- a/runtime/utils.h
+++ b/runtime/utils.h
@@ -301,6 +301,30 @@ constexpr PointerSize ConvertToPointerSize(T any) {
}
}
+// Returns a type cast pointer if object pointed to is within the provided bounds.
+// Otherwise returns nullptr.
+template <typename T>
+inline static T BoundsCheckedCast(const void* pointer,
+ const void* lower,
+ const void* upper) {
+ const uint8_t* bound_begin = static_cast<const uint8_t*>(lower);
+ const uint8_t* bound_end = static_cast<const uint8_t*>(upper);
+ DCHECK(bound_begin <= bound_end);
+
+ T result = reinterpret_cast<T>(pointer);
+ const uint8_t* begin = static_cast<const uint8_t*>(pointer);
+ const uint8_t* end = begin + sizeof(*result);
+ if (begin < bound_begin || end > bound_end || begin > end) {
+ return nullptr;
+ }
+ return result;
+}
+
+template <typename T, size_t size>
+constexpr size_t ArrayCount(const T (&)[size]) {
+ return size;
+}
+
} // namespace art
#endif // ART_RUNTIME_UTILS_H_
diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h
index bd1b044dae..98658215f7 100644
--- a/runtime/utils/dex_cache_arrays_layout-inl.h
+++ b/runtime/utils/dex_cache_arrays_layout-inl.h
@@ -29,7 +29,8 @@
namespace art {
inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size,
- const DexFile::Header& header)
+ const DexFile::Header& header,
+ uint32_t num_call_sites)
: pointer_size_(pointer_size),
/* types_offset_ is always 0u, so it's constexpr */
methods_offset_(
@@ -40,12 +41,14 @@ inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size,
RoundUp(strings_offset_ + StringsSize(header.string_ids_size_), FieldsAlignment())),
method_types_offset_(
RoundUp(fields_offset_ + FieldsSize(header.field_ids_size_), MethodTypesAlignment())),
- size_(
- RoundUp(method_types_offset_ + MethodTypesSize(header.proto_ids_size_), Alignment())) {
+ call_sites_offset_(
+ RoundUp(method_types_offset_ + MethodTypesSize(header.proto_ids_size_),
+ MethodTypesAlignment())),
+ size_(RoundUp(call_sites_offset_ + CallSitesSize(num_call_sites), Alignment())) {
}
inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size, const DexFile* dex_file)
- : DexCacheArraysLayout(pointer_size, dex_file->GetHeader()) {
+ : DexCacheArraysLayout(pointer_size, dex_file->GetHeader(), dex_file->NumCallSiteIds()) {
}
inline constexpr size_t DexCacheArraysLayout::Alignment() {
@@ -131,10 +134,18 @@ inline size_t DexCacheArraysLayout::MethodTypesSize(size_t num_elements) const {
inline size_t DexCacheArraysLayout::MethodTypesAlignment() const {
static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
- "alignof(MethodTypeDexCacheType) != 8");
+ "Expecting alignof(MethodTypeDexCacheType) == 8");
return alignof(mirror::MethodTypeDexCacheType);
}
+inline size_t DexCacheArraysLayout::CallSitesSize(size_t num_elements) const {
+ return ArraySize(GcRootAsPointerSize<mirror::CallSite>(), num_elements);
+}
+
+inline size_t DexCacheArraysLayout::CallSitesAlignment() const {
+ return alignof(GcRoot<mirror::CallSite>);
+}
+
inline size_t DexCacheArraysLayout::ElementOffset(PointerSize element_size, uint32_t idx) {
return static_cast<size_t>(element_size) * idx;
}
diff --git a/runtime/utils/dex_cache_arrays_layout.h b/runtime/utils/dex_cache_arrays_layout.h
index 7d4b23a8dd..ed677ed3f4 100644
--- a/runtime/utils/dex_cache_arrays_layout.h
+++ b/runtime/utils/dex_cache_arrays_layout.h
@@ -37,11 +37,14 @@ class DexCacheArraysLayout {
strings_offset_(0u),
fields_offset_(0u),
method_types_offset_(0u),
+ call_sites_offset_(0u),
size_(0u) {
}
// Construct a layout for a particular dex file header.
- DexCacheArraysLayout(PointerSize pointer_size, const DexFile::Header& header);
+ DexCacheArraysLayout(PointerSize pointer_size,
+ const DexFile::Header& header,
+ uint32_t num_call_sites);
// Construct a layout for a particular dex file.
DexCacheArraysLayout(PointerSize pointer_size, const DexFile* dex_file);
@@ -104,6 +107,14 @@ class DexCacheArraysLayout {
size_t MethodTypesAlignment() const;
+ size_t CallSitesOffset() const {
+ return call_sites_offset_;
+ }
+
+ size_t CallSitesSize(size_t num_elements) const;
+
+ size_t CallSitesAlignment() const;
+
private:
static constexpr size_t types_offset_ = 0u;
const PointerSize pointer_size_; // Must be first for construction initialization order.
@@ -111,6 +122,7 @@ class DexCacheArraysLayout {
const size_t strings_offset_;
const size_t fields_offset_;
const size_t method_types_offset_;
+ const size_t call_sites_offset_;
const size_t size_;
static size_t Alignment(PointerSize pointer_size);
diff --git a/runtime/utils_test.cc b/runtime/utils_test.cc
index 02f1e1bbfe..634bd47f05 100644
--- a/runtime/utils_test.cc
+++ b/runtime/utils_test.cc
@@ -408,4 +408,23 @@ TEST_F(UtilsTest, IsValidDescriptor) {
IsValidDescriptor(reinterpret_cast<char*>(&unpaired_surrogate_with_multibyte_sequence[0])));
}
+TEST_F(UtilsTest, ArrayCount) {
+ int i[64];
+ EXPECT_EQ(ArrayCount(i), 64u);
+ char c[7];
+ EXPECT_EQ(ArrayCount(c), 7u);
+}
+
+TEST_F(UtilsTest, BoundsCheckedCast) {
+ char buffer[64];
+ const char* buffer_end = buffer + ArrayCount(buffer);
+ EXPECT_EQ(BoundsCheckedCast<const uint64_t*>(nullptr, buffer, buffer_end), nullptr);
+ EXPECT_EQ(BoundsCheckedCast<const uint64_t*>(buffer, buffer, buffer_end),
+ reinterpret_cast<const uint64_t*>(buffer));
+ EXPECT_EQ(BoundsCheckedCast<const uint64_t*>(buffer + 56, buffer, buffer_end),
+ reinterpret_cast<const uint64_t*>(buffer + 56));
+ EXPECT_EQ(BoundsCheckedCast<const uint64_t*>(buffer - 1, buffer, buffer_end), nullptr);
+ EXPECT_EQ(BoundsCheckedCast<const uint64_t*>(buffer + 57, buffer, buffer_end), nullptr);
+}
+
} // namespace art
diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc
index 5f55f3fd29..16739fa3bc 100644
--- a/runtime/verifier/method_verifier.cc
+++ b/runtime/verifier/method_verifier.cc
@@ -3114,6 +3114,44 @@ bool MethodVerifier::CodeFlowVerifyInstruction(uint32_t* start_guess) {
just_set_result = true;
break;
}
+ case Instruction::INVOKE_CUSTOM:
+ case Instruction::INVOKE_CUSTOM_RANGE: {
+ // Verify registers based on method_type in the call site.
+ bool is_range = (inst->Opcode() == Instruction::INVOKE_CUSTOM_RANGE);
+
+ // Step 1. Check the call site that produces the method handle for invocation
+ const uint32_t call_site_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
+ if (!CheckCallSite(call_site_idx)) {
+ DCHECK(HasFailures());
+ break;
+ }
+
+ // Step 2. Check the register arguments correspond to the expected arguments for the
+ // method handle produced by step 1. The dex file verifier has checked ranges for
+ // the first three arguments and CheckCallSite has checked the method handle type.
+ CallSiteArrayValueIterator it(*dex_file_, dex_file_->GetCallSiteId(call_site_idx));
+ it.Next(); // Skip to name.
+ it.Next(); // Skip to method type of the method handle
+ const uint32_t proto_idx = static_cast<uint32_t>(it.GetJavaValue().i);
+ const DexFile::ProtoId& proto_id = dex_file_->GetProtoId(proto_idx);
+ DexFileParameterIterator param_it(*dex_file_, proto_id);
+ // Treat method as static as it has yet to be determined.
+ VerifyInvocationArgsFromIterator(&param_it, inst, METHOD_STATIC, is_range, nullptr);
+ const char* return_descriptor = dex_file_->GetReturnTypeDescriptor(proto_id);
+
+ // Step 3. Propagate return type information
+ const RegType& return_type =
+ reg_types_.FromDescriptor(GetClassLoader(), return_descriptor, false);
+ if (!return_type.IsLowHalf()) {
+ work_line_->SetResultRegisterType(this, return_type);
+ } else {
+ work_line_->SetResultRegisterTypeWide(return_type, return_type.HighHalf(&reg_types_));
+ }
+ just_set_result = true;
+ // TODO: Add compiler support for invoke-custom (b/35337872).
+ Fail(VERIFY_ERROR_FORCE_INTERPRETER);
+ break;
+ }
case Instruction::NEG_INT:
case Instruction::NOT_INT:
work_line_->CheckUnaryOp(this, inst, reg_types_.Integer(), reg_types_.Integer());
@@ -3423,7 +3461,7 @@ bool MethodVerifier::CodeFlowVerifyInstruction(uint32_t* start_guess) {
/* These should never appear during verification. */
case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
case Instruction::UNUSED_F3 ... Instruction::UNUSED_F9:
- case Instruction::UNUSED_FC ... Instruction::UNUSED_FF:
+ case Instruction::UNUSED_FE ... Instruction::UNUSED_FF:
case Instruction::UNUSED_79:
case Instruction::UNUSED_7A:
Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Unexpected opcode " << inst->DumpString(dex_file_);
@@ -4094,6 +4132,116 @@ void MethodVerifier::VerifyInvocationArgsUnresolvedMethod(const Instruction* ins
VerifyInvocationArgsFromIterator(&it, inst, method_type, is_range, nullptr);
}
+bool MethodVerifier::CheckCallSite(uint32_t call_site_idx) {
+ CallSiteArrayValueIterator it(*dex_file_, dex_file_->GetCallSiteId(call_site_idx));
+ // Check essential arguments are provided. The dex file verifier has verified indices of the
+ // main values (method handle, name, method_type).
+ if (it.Size() < 3) {
+ Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Call site #" << call_site_idx
+ << " has too few arguments: "
+ << it.Size() << " < 3";
+ return false;
+ }
+
+ // Get and check the first argument: the method handle.
+ uint32_t method_handle_idx = static_cast<uint32_t>(it.GetJavaValue().i);
+ it.Next();
+ const DexFile::MethodHandleItem& mh = dex_file_->GetMethodHandle(method_handle_idx);
+ if (mh.method_handle_type_ != static_cast<uint16_t>(DexFile::MethodHandleType::kInvokeStatic)) {
+ Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Call site #" << call_site_idx
+ << " argument 0 method handle type is not InvokeStatic";
+ return false;
+ }
+
+ // Skip the second argument, the name to resolve, as checked by the
+ // dex file verifier.
+ it.Next();
+
+ // Skip the third argument, the method type expected, as checked by
+ // the dex file verifier.
+ it.Next();
+
+ // Check the bootstrap method handle and remaining arguments.
+ const DexFile::MethodId& method_id = dex_file_->GetMethodId(mh.field_or_method_idx_);
+ uint32_t length;
+ const char* shorty = dex_file_->GetMethodShorty(method_id, &length);
+
+ if (it.Size() < length - 1) {
+ Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Call site #" << call_site_idx
+ << " too few arguments for bootstrap method: "
+ << it.Size() << " < " << (length - 1);
+ return false;
+ }
+
+ // Check the return type and first 3 arguments are references
+ // (CallSite, Lookup, String, MethodType). If they are not of the
+ // expected types (or subtypes), it will trigger a
+ // WrongMethodTypeException during execution.
+ if (shorty[0] != 'L') {
+ Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Call site #" << call_site_idx
+ << " bootstrap return type is not a reference";
+ return false;
+ }
+
+ for (uint32_t i = 1; i < 4; ++i) {
+ if (shorty[i] != 'L') {
+ Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Call site #" << call_site_idx
+ << " bootstrap method argument " << (i - 1)
+ << " is not a reference";
+ return false;
+ }
+ }
+
+ // Check the optional arguments.
+ for (uint32_t i = 4; i < length; ++i, it.Next()) {
+ bool match = false;
+ switch (it.GetValueType()) {
+ case EncodedArrayValueIterator::ValueType::kBoolean:
+ case EncodedArrayValueIterator::ValueType::kByte:
+ case EncodedArrayValueIterator::ValueType::kShort:
+ case EncodedArrayValueIterator::ValueType::kChar:
+ case EncodedArrayValueIterator::ValueType::kInt:
+ // These all fit within one register and encoders do not seem
+ // too exacting on the encoding type they use (i.e. using
+ // integer for all of these).
+ match = (strchr("ZBCSI", shorty[i]) != nullptr);
+ break;
+ case EncodedArrayValueIterator::ValueType::kLong:
+ match = ('J' == shorty[i]);
+ break;
+ case EncodedArrayValueIterator::ValueType::kFloat:
+ match = ('F' == shorty[i]);
+ break;
+ case EncodedArrayValueIterator::ValueType::kDouble:
+ match = ('D' == shorty[i]);
+ break;
+ case EncodedArrayValueIterator::ValueType::kMethodType:
+ case EncodedArrayValueIterator::ValueType::kMethodHandle:
+ case EncodedArrayValueIterator::ValueType::kString:
+ case EncodedArrayValueIterator::ValueType::kType:
+ case EncodedArrayValueIterator::ValueType::kNull:
+ match = ('L' == shorty[i]);
+ break;
+ case EncodedArrayValueIterator::ValueType::kField:
+ case EncodedArrayValueIterator::ValueType::kMethod:
+ case EncodedArrayValueIterator::ValueType::kEnum:
+ case EncodedArrayValueIterator::ValueType::kArray:
+ case EncodedArrayValueIterator::ValueType::kAnnotation:
+ // Unreachable based on current EncodedArrayValueIterator::Next().
+ UNREACHABLE();
+ }
+
+ if (!match) {
+ Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Call site #" << call_site_idx
+ << " bootstrap method argument " << (i - 1)
+ << " expected " << shorty[i]
+ << " got value type: " << it.GetValueType();
+ return false;
+ }
+ }
+ return true;
+}
+
class MethodParamListDescriptorIterator {
public:
explicit MethodParamListDescriptorIterator(ArtMethod* res_method) :
diff --git a/runtime/verifier/method_verifier.h b/runtime/verifier/method_verifier.h
index fa5a698423..7b67967c28 100644
--- a/runtime/verifier/method_verifier.h
+++ b/runtime/verifier/method_verifier.h
@@ -697,6 +697,11 @@ class MethodVerifier {
REQUIRES_SHARED(Locks::mutator_lock_);
/*
+ * Verify the arguments present for a call site. Returns "true" if all is well, "false" otherwise.
+ */
+ bool CheckCallSite(uint32_t call_site_idx);
+
+ /*
* Verify that the target instruction is not "move-exception". It's important that the only way
* to execute a move-exception is as the first instruction of an exception handler.
* Returns "true" if all is well, "false" if the target instruction is move-exception.
diff --git a/runtime/verifier/verifier_deps.cc b/runtime/verifier/verifier_deps.cc
index 113160785c..000cf7c393 100644
--- a/runtime/verifier/verifier_deps.cc
+++ b/runtime/verifier/verifier_deps.cc
@@ -890,12 +890,12 @@ bool VerifierDeps::VerifyAssignability(Handle<mirror::ClassLoader> class_loader,
source.Assign(
FindClassAndClearException(class_linker, self, source_desc.c_str(), class_loader));
- if (destination.Get() == nullptr) {
+ if (destination == nullptr) {
LOG(INFO) << "VerifiersDeps: Could not resolve class " << destination_desc;
return false;
}
- if (source.Get() == nullptr) {
+ if (source == nullptr) {
LOG(INFO) << "VerifierDeps: Could not resolve class " << source_desc;
return false;
}
@@ -925,7 +925,7 @@ bool VerifierDeps::VerifyClasses(Handle<mirror::ClassLoader> class_loader,
cls.Assign(FindClassAndClearException(class_linker, self, descriptor, class_loader));
if (entry.IsResolved()) {
- if (cls.Get() == nullptr) {
+ if (cls == nullptr) {
LOG(INFO) << "VerifierDeps: Could not resolve class " << descriptor;
return false;
} else if (entry.GetAccessFlags() != GetAccessFlags(cls.Get())) {
@@ -939,7 +939,7 @@ bool VerifierDeps::VerifyClasses(Handle<mirror::ClassLoader> class_loader,
<< std::dec;
return false;
}
- } else if (cls.Get() != nullptr) {
+ } else if (cls != nullptr) {
LOG(INFO) << "VerifierDeps: Unexpected successful resolution of class " << descriptor;
return false;
}
diff --git a/runtime/zip_archive.cc b/runtime/zip_archive.cc
index cd79bb61f3..416873f4c3 100644
--- a/runtime/zip_archive.cc
+++ b/runtime/zip_archive.cc
@@ -23,10 +23,16 @@
#include <unistd.h>
#include <vector>
+#include "android-base/stringprintf.h"
#include "base/unix_file/fd_file.h"
namespace art {
+// Log file contents and mmap info when mapping entries directly.
+static constexpr const bool kDebugZipMapDirectly = false;
+
+using android::base::StringPrintf;
+
uint32_t ZipEntry::GetUncompressedLength() {
return zip_entry_->uncompressed_length;
}
@@ -35,6 +41,15 @@ uint32_t ZipEntry::GetCrc32() {
return zip_entry_->crc32;
}
+bool ZipEntry::IsUncompressed() {
+ return zip_entry_->method == kCompressStored;
+}
+
+bool ZipEntry::IsAlignedTo(size_t alignment) {
+ DCHECK(IsPowerOfTwo(alignment)) << alignment;
+ return IsAlignedParam(zip_entry_->offset, static_cast<int>(alignment));
+}
+
ZipEntry::~ZipEntry() {
delete zip_entry_;
}
@@ -73,6 +88,102 @@ MemMap* ZipEntry::ExtractToMemMap(const char* zip_filename, const char* entry_fi
return map.release();
}
+MemMap* ZipEntry::MapDirectlyFromFile(const char* zip_filename, std::string* error_msg) {
+ const int zip_fd = GetFileDescriptor(handle_);
+ const char* entry_filename = entry_name_.c_str();
+
+ // Should not happen since we don't have a memory ZipArchive constructor.
+ // However the underlying ZipArchive isn't required to have an FD,
+ // so check to be sure.
+ CHECK_GE(zip_fd, 0) <<
+ StringPrintf("Cannot map '%s' (in zip '%s') directly because the zip archive "
+ "is not file backed.",
+ entry_filename,
+ zip_filename);
+
+ if (!IsUncompressed()) {
+ *error_msg = StringPrintf("Cannot map '%s' (in zip '%s') directly because it is compressed.",
+ entry_filename,
+ zip_filename);
+ return nullptr;
+ } else if (zip_entry_->uncompressed_length != zip_entry_->compressed_length) {
+ *error_msg = StringPrintf("Cannot map '%s' (in zip '%s') directly because "
+ "entry has bad size (%u != %u).",
+ entry_filename,
+ zip_filename,
+ zip_entry_->uncompressed_length,
+ zip_entry_->compressed_length);
+ return nullptr;
+ }
+
+ std::string name(entry_filename);
+ name += " mapped directly in memory from ";
+ name += zip_filename;
+
+ const off_t offset = zip_entry_->offset;
+
+ if (kDebugZipMapDirectly) {
+ LOG(INFO) << "zip_archive: " << "make mmap of " << name << " @ offset = " << offset;
+ }
+
+ std::unique_ptr<MemMap> map(
+ MemMap::MapFileAtAddress(nullptr, // Expected pointer address
+ GetUncompressedLength(), // Byte count
+ PROT_READ | PROT_WRITE,
+ MAP_PRIVATE,
+ zip_fd,
+ offset,
+ false, // Don't restrict allocation to lower4GB
+ false, // Doesn't overlap existing map (reuse=false)
+ name.c_str(),
+ /*out*/error_msg));
+
+ if (map == nullptr) {
+ DCHECK(!error_msg->empty());
+ }
+
+ if (kDebugZipMapDirectly) {
+ // Dump contents of file, same format as using this shell command:
+ // $> od -j <offset> -t x1 <zip_filename>
+ static constexpr const int kMaxDumpChars = 15;
+ lseek(zip_fd, 0, SEEK_SET);
+
+ int count = offset + kMaxDumpChars;
+
+ std::string tmp;
+ char buf;
+
+ // Dump file contents.
+ int i = 0;
+ while (read(zip_fd, &buf, 1) > 0 && i < count) {
+ tmp += StringPrintf("%3d ", (unsigned int)buf);
+ ++i;
+ }
+
+ LOG(INFO) << "map_fd raw bytes starting at 0";
+ LOG(INFO) << "" << tmp;
+ LOG(INFO) << "---------------------------";
+
+ // Dump map contents.
+ if (map != nullptr) {
+ tmp = "";
+
+ count = kMaxDumpChars;
+
+ uint8_t* begin = map->Begin();
+ for (i = 0; i < count; ++i) {
+ tmp += StringPrintf("%3d ", (unsigned int)begin[i]);
+ }
+
+ LOG(INFO) << "map address " << StringPrintf("%p", begin);
+ LOG(INFO) << "map first " << kMaxDumpChars << " chars:";
+ LOG(INFO) << tmp;
+ }
+ }
+
+ return map.release();
+}
+
static void SetCloseOnExec(int fd) {
// This dance is more portable than Linux's O_CLOEXEC open(2) flag.
int flags = fcntl(fd, F_GETFD);
@@ -129,7 +240,7 @@ ZipEntry* ZipArchive::Find(const char* name, std::string* error_msg) const {
return nullptr;
}
- return new ZipEntry(handle_, zip_entry.release());
+ return new ZipEntry(handle_, zip_entry.release(), name);
}
ZipArchive::~ZipArchive() {
diff --git a/runtime/zip_archive.h b/runtime/zip_archive.h
index 42bf55cb3f..18584447e8 100644
--- a/runtime/zip_archive.h
+++ b/runtime/zip_archive.h
@@ -37,19 +37,35 @@ class MemMap;
class ZipEntry {
public:
bool ExtractToFile(File& file, std::string* error_msg);
+ // Extract this entry to anonymous memory (R/W).
+ // Returns null on failure and sets error_msg.
MemMap* ExtractToMemMap(const char* zip_filename, const char* entry_filename,
std::string* error_msg);
+ // Create a file-backed private (clean, R/W) memory mapping to this entry.
+ // 'zip_filename' is used for diagnostics only,
+ // the original file that the ZipArchive was open with is used
+ // for the mapping.
+ //
+ // Will only succeed if the entry is stored uncompressed.
+ // Returns null on failure and sets error_msg.
+ MemMap* MapDirectlyFromFile(const char* zip_filename, /*out*/std::string* error_msg);
virtual ~ZipEntry();
uint32_t GetUncompressedLength();
uint32_t GetCrc32();
+ bool IsUncompressed();
+ bool IsAlignedTo(size_t alignment);
+
private:
ZipEntry(ZipArchiveHandle handle,
- ::ZipEntry* zip_entry) : handle_(handle), zip_entry_(zip_entry) {}
+ ::ZipEntry* zip_entry,
+ const std::string& entry_name)
+ : handle_(handle), zip_entry_(zip_entry), entry_name_(entry_name) {}
ZipArchiveHandle handle_;
::ZipEntry* const zip_entry_;
+ std::string const entry_name_;
friend class ZipArchive;
DISALLOW_COPY_AND_ASSIGN(ZipEntry);