75 files changed, 2401 insertions, 2238 deletions
diff --git a/build/Android.oat.mk b/build/Android.oat.mk index ba3ef053de..08b1e10268 100644 --- a/build/Android.oat.mk +++ b/build/Android.oat.mk @@ -96,7 +96,7 @@ $$(core_image_name): PRIVATE_CORE_MULTI_PARAM := $$(core_multi_param) $$(core_image_name): $$(HOST_CORE_DEX_LOCATIONS) $$(core_dex2oat_dependency) @echo "host dex2oat: $$@" @mkdir -p $$(dir $$@) - $$(hide) $$(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \ + $$(hide) ANDROID_LOG_TAGS="*:e" $$(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \ --runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \ --image-classes=$$(PRELOADED_CLASSES) $$(addprefix --dex-file=,$$(HOST_CORE_DEX_FILES)) \ $$(addprefix --dex-location=,$$(HOST_CORE_DEX_LOCATIONS)) --oat-file=$$(PRIVATE_CORE_OAT_NAME) \ diff --git a/compiler/dex/dex_to_dex_decompiler_test.cc b/compiler/dex/dex_to_dex_decompiler_test.cc index d4a9ba5491..76250d202c 100644 --- a/compiler/dex/dex_to_dex_decompiler_test.cc +++ b/compiler/dex/dex_to_dex_decompiler_test.cc @@ -16,6 +16,7 @@ #include "dex_to_dex_decompiler.h" +#include "base/casts.h" #include "class_linker.h" #include "common_compiler_test.h" #include "compiled_method-inl.h" @@ -26,6 +27,7 @@ #include "driver/compiler_options.h" #include "handle_scope-inl.h" #include "mirror/class_loader.h" +#include "quick_compiler_callbacks.h" #include "runtime.h" #include "scoped_thread_state_change-inl.h" #include "thread.h" @@ -43,7 +45,7 @@ class DexToDexDecompilerTest : public CommonCompilerTest { compiler_options_->SetCompilerFilter(CompilerFilter::kQuicken); // Create the main VerifierDeps, here instead of in the compiler since we want to aggregate // the results for all the dex files, not just the results for the current dex file. - Runtime::Current()->GetCompilerCallbacks()->SetVerifierDeps( + down_cast<QuickCompilerCallbacks*>(Runtime::Current()->GetCompilerCallbacks())->SetVerifierDeps( new verifier::VerifierDeps(GetDexFiles(class_loader))); compiler_driver_->SetDexFilesForOatFile(GetDexFiles(class_loader)); compiler_driver_->CompileAll(class_loader, GetDexFiles(class_loader), &timings); diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc index 58a35dde8e..fb5d9332d0 100644 --- a/compiler/optimizing/stack_map_stream.cc +++ b/compiler/optimizing/stack_map_stream.cc @@ -56,14 +56,14 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc, DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count"; } - current_stack_map_ = BitTableBuilder<StackMap::kCount>::Entry(); + current_stack_map_ = BitTableBuilder<StackMap>::Entry(); current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind); current_stack_map_[StackMap::kPackedNativePc] = StackMap::PackNativePc(native_pc_offset, instruction_set_); current_stack_map_[StackMap::kDexPc] = dex_pc; if (register_mask != 0) { uint32_t shift = LeastSignificantBit(register_mask); - BitTableBuilder<RegisterMask::kCount>::Entry entry; + BitTableBuilder<RegisterMask>::Entry entry; entry[RegisterMask::kValue] = register_mask >> shift; entry[RegisterMask::kShift] = shift; current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry); @@ -126,7 +126,7 @@ void StackMapStream::EndStackMapEntry() { void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) { uint32_t packed_native_pc = current_stack_map_[StackMap::kPackedNativePc]; size_t invoke_info_index = invoke_infos_.size(); - BitTableBuilder<InvokeInfo::kCount>::Entry entry; + BitTableBuilder<InvokeInfo>::Entry entry; 
entry[InvokeInfo::kPackedNativePc] = packed_native_pc; entry[InvokeInfo::kInvokeType] = invoke_type; entry[InvokeInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index}); @@ -153,7 +153,7 @@ void StackMapStream::BeginInlineInfoEntry(ArtMethod* method, expected_num_dex_registers_ += num_dex_registers; - BitTableBuilder<InlineInfo::kCount>::Entry entry; + BitTableBuilder<InlineInfo>::Entry entry; entry[InlineInfo::kIsLast] = InlineInfo::kMore; entry[InlineInfo::kDexPc] = dex_pc; entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_); @@ -215,7 +215,7 @@ void StackMapStream::CreateDexRegisterMap() { // Distance is difference between this index and the index of last modification. uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i]; if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) { - BitTableBuilder<DexRegisterInfo::kCount>::Entry entry; + BitTableBuilder<DexRegisterInfo>::Entry entry; entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind()); entry[DexRegisterInfo::kPackedValue] = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()); diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h index 6842d9fd7e..7d1820d67f 100644 --- a/compiler/optimizing/stack_map_stream.h +++ b/compiler/optimizing/stack_map_stream.h @@ -103,26 +103,26 @@ class StackMapStream : public ValueObject { void CreateDexRegisterMap(); const InstructionSet instruction_set_; - BitTableBuilder<StackMap::kCount> stack_maps_; - BitTableBuilder<RegisterMask::kCount> register_masks_; + BitTableBuilder<StackMap> stack_maps_; + BitTableBuilder<RegisterMask> register_masks_; BitmapTableBuilder stack_masks_; - BitTableBuilder<InvokeInfo::kCount> invoke_infos_; - BitTableBuilder<InlineInfo::kCount> inline_infos_; + BitTableBuilder<InvokeInfo> invoke_infos_; + BitTableBuilder<InlineInfo> inline_infos_; BitmapTableBuilder dex_register_masks_; - BitTableBuilder<MaskInfo::kCount> dex_register_maps_; - BitTableBuilder<DexRegisterInfo::kCount> dex_register_catalog_; + BitTableBuilder<MaskInfo> dex_register_maps_; + BitTableBuilder<DexRegisterInfo> dex_register_catalog_; uint32_t num_dex_registers_ = 0; // TODO: Make this const and get the value in constructor. ScopedArenaVector<uint8_t> out_; - BitTableBuilder<1> method_infos_; + BitTableBuilderBase<1> method_infos_; ScopedArenaVector<BitVector*> lazy_stack_masks_; // Variables which track the current state between Begin/End calls; bool in_stack_map_; bool in_inline_info_; - BitTableBuilder<StackMap::kCount>::Entry current_stack_map_; - ScopedArenaVector<BitTableBuilder<InlineInfo::kCount>::Entry> current_inline_infos_; + BitTableBuilder<StackMap>::Entry current_stack_map_; + ScopedArenaVector<BitTableBuilder<InlineInfo>::Entry> current_inline_infos_; ScopedArenaVector<DexRegisterLocation> current_dex_registers_; ScopedArenaVector<DexRegisterLocation> previous_dex_registers_; ScopedArenaVector<uint32_t> dex_register_timestamp_; // Stack map index of last change. @@ -131,7 +131,7 @@ class StackMapStream : public ValueObject { // Temporary variables used in CreateDexRegisterMap. // They are here so that we can reuse the reserved memory. ArenaBitVector temp_dex_register_mask_; - ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo::kCount>::Entry> temp_dex_register_map_; + ScopedArenaVector<BitTableBuilder<DexRegisterMapInfo>::Entry> temp_dex_register_map_; // A set of lambda functions to be executed at the end to verify // the encoded data. 
It is generally only used in debug builds. diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc index a060fd2e36..ad44624f76 100644 --- a/dex2oat/dex2oat_test.cc +++ b/dex2oat/dex2oat_test.cc @@ -1768,7 +1768,7 @@ TEST_F(Dex2oatTest, CompactDexGenerationFailureMultiDex) { writer.Finish(); ASSERT_EQ(apk_file.GetFile()->Flush(), 0); } - const std::string dex_location = apk_file.GetFilename(); + const std::string& dex_location = apk_file.GetFilename(); const std::string odex_location = GetOdexDir() + "/output.odex"; GenerateOdexForTest(dex_location, odex_location, @@ -1974,7 +1974,7 @@ TEST_F(Dex2oatTest, QuickenedInput) { << "Failed to find candidate code item with only one code unit in last instruction."; }); - std::string dex_location = temp_dex.GetFilename(); + const std::string& dex_location = temp_dex.GetFilename(); std::string odex_location = GetOdexDir() + "/quickened.odex"; std::string vdex_location = GetOdexDir() + "/quickened.vdex"; std::unique_ptr<File> vdex_output(OS::CreateEmptyFile(vdex_location.c_str())); @@ -2049,7 +2049,7 @@ TEST_F(Dex2oatTest, CompactDexInvalidSource) { writer.Finish(); ASSERT_EQ(invalid_dex.GetFile()->Flush(), 0); } - const std::string dex_location = invalid_dex.GetFilename(); + const std::string& dex_location = invalid_dex.GetFilename(); const std::string odex_location = GetOdexDir() + "/output.odex"; std::string error_msg; int status = GenerateOdexForTestWithStatus( diff --git a/dex2oat/linker/arm/relative_patcher_arm_base.cc b/dex2oat/linker/arm/relative_patcher_arm_base.cc index 7cb8ae55c5..a2ba339278 100644 --- a/dex2oat/linker/arm/relative_patcher_arm_base.cc +++ b/dex2oat/linker/arm/relative_patcher_arm_base.cc @@ -251,7 +251,7 @@ std::vector<debug::MethodDebugInfo> ArmBaseRelativePatcher::GenerateThunkDebugIn continue; } // Get the base name to use for the first occurrence of the thunk. - std::string base_name = data.GetDebugName(); + const std::string& base_name = data.GetDebugName(); for (size_t i = start, num = data.NumberOfThunks(); i != num; ++i) { debug::MethodDebugInfo info = {}; if (i == 0u) { diff --git a/dexlayout/compact_dex_writer.cc b/dexlayout/compact_dex_writer.cc index 3f5dbcfce5..00fb0af710 100644 --- a/dexlayout/compact_dex_writer.cc +++ b/dexlayout/compact_dex_writer.cc @@ -40,9 +40,8 @@ CompactDexWriter::Container::Container(bool dedupe_code_items) uint32_t CompactDexWriter::WriteDebugInfoOffsetTable(Stream* stream) { const uint32_t start_offset = stream->Tell(); - const dex_ir::Collections& collections = header_->GetCollections(); // Debug offsets for method indexes. 0 means no debug info. - std::vector<uint32_t> debug_info_offsets(collections.MethodIdsSize(), 0u); + std::vector<uint32_t> debug_info_offsets(header_->MethodIds().Size(), 0u); static constexpr InvokeType invoke_types[] = { kDirect, @@ -50,7 +49,7 @@ uint32_t CompactDexWriter::WriteDebugInfoOffsetTable(Stream* stream) { }; for (InvokeType invoke_type : invoke_types) { - for (const std::unique_ptr<dex_ir::ClassDef>& class_def : collections.ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { // Skip classes that are not defined in this dex file. 
dex_ir::ClassData* class_data = class_def->GetClassData(); if (class_data == nullptr) { @@ -232,14 +231,13 @@ uint32_t CompactDexWriter::Deduper::Dedupe(uint32_t data_start, } void CompactDexWriter::SortDebugInfosByMethodIndex() { - dex_ir::Collections& collections = header_->GetCollections(); static constexpr InvokeType invoke_types[] = { kDirect, kVirtual }; std::map<const dex_ir::DebugInfoItem*, uint32_t> method_idx_map; for (InvokeType invoke_type : invoke_types) { - for (std::unique_ptr<dex_ir::ClassDef>& class_def : collections.ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { // Skip classes that are not defined in this dex file. dex_ir::ClassData* class_data = class_def->GetClassData(); if (class_data == nullptr) { @@ -257,8 +255,8 @@ void CompactDexWriter::SortDebugInfosByMethodIndex() { } } } - std::sort(collections.DebugInfoItems().begin(), - collections.DebugInfoItems().end(), + std::sort(header_->DebugInfoItems().begin(), + header_->DebugInfoItems().end(), [&](const std::unique_ptr<dex_ir::DebugInfoItem>& a, const std::unique_ptr<dex_ir::DebugInfoItem>& b) { auto it_a = method_idx_map.find(a.get()); @@ -282,20 +280,19 @@ void CompactDexWriter::WriteHeader(Stream* stream) { header.endian_tag_ = header_->EndianTag(); header.link_size_ = header_->LinkSize(); header.link_off_ = header_->LinkOffset(); - const dex_ir::Collections& collections = header_->GetCollections(); - header.map_off_ = collections.MapListOffset(); - header.string_ids_size_ = collections.StringIdsSize(); - header.string_ids_off_ = collections.StringIdsOffset(); - header.type_ids_size_ = collections.TypeIdsSize(); - header.type_ids_off_ = collections.TypeIdsOffset(); - header.proto_ids_size_ = collections.ProtoIdsSize(); - header.proto_ids_off_ = collections.ProtoIdsOffset(); - header.field_ids_size_ = collections.FieldIdsSize(); - header.field_ids_off_ = collections.FieldIdsOffset(); - header.method_ids_size_ = collections.MethodIdsSize(); - header.method_ids_off_ = collections.MethodIdsOffset(); - header.class_defs_size_ = collections.ClassDefsSize(); - header.class_defs_off_ = collections.ClassDefsOffset(); + header.map_off_ = header_->MapListOffset(); + header.string_ids_size_ = header_->StringIds().Size(); + header.string_ids_off_ = header_->StringIds().GetOffset(); + header.type_ids_size_ = header_->TypeIds().Size(); + header.type_ids_off_ = header_->TypeIds().GetOffset(); + header.proto_ids_size_ = header_->ProtoIds().Size(); + header.proto_ids_off_ = header_->ProtoIds().GetOffset(); + header.field_ids_size_ = header_->FieldIds().Size(); + header.field_ids_off_ = header_->FieldIds().GetOffset(); + header.method_ids_size_ = header_->MethodIds().Size(); + header.method_ids_off_ = header_->MethodIds().GetOffset(); + header.class_defs_size_ = header_->ClassDefs().Size(); + header.class_defs_off_ = header_->ClassDefs().GetOffset(); header.data_size_ = header_->DataSize(); header.data_off_ = header_->DataOffset(); header.owned_data_begin_ = owned_data_begin_; @@ -332,16 +329,15 @@ void CompactDexWriter::WriteStringData(Stream* stream, dex_ir::StringData* strin } bool CompactDexWriter::CanGenerateCompactDex(std::string* error_msg) { - dex_ir::Collections& collections = header_->GetCollections(); static constexpr InvokeType invoke_types[] = { kDirect, kVirtual }; - std::vector<bool> saw_method_id(collections.MethodIdsSize(), false); - std::vector<dex_ir::CodeItem*> method_id_code_item(collections.MethodIdsSize(), nullptr); - std::vector<dex_ir::DebugInfoItem*> 
method_id_debug_info(collections.MethodIdsSize(), nullptr); + std::vector<bool> saw_method_id(header_->MethodIds().Size(), false); + std::vector<dex_ir::CodeItem*> method_id_code_item(header_->MethodIds().Size(), nullptr); + std::vector<dex_ir::DebugInfoItem*> method_id_debug_info(header_->MethodIds().Size(), nullptr); for (InvokeType invoke_type : invoke_types) { - for (std::unique_ptr<dex_ir::ClassDef>& class_def : collections.ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { // Skip classes that are not defined in this dex file. dex_ir::ClassData* class_data = class_def->GetClassData(); if (class_data == nullptr) { @@ -407,8 +403,6 @@ bool CompactDexWriter::Write(DexContainer* output, std::string* error_msg) { // Starting offset is right after the header. main_stream->Seek(GetHeaderSize()); - dex_ir::Collections& collection = header_->GetCollections(); - // Based on: https://source.android.com/devices/tech/dalvik/dex-format // Since the offsets may not be calculated already, the writing must be done in the correct order. const uint32_t string_ids_offset = main_stream->Tell(); @@ -469,16 +463,16 @@ bool CompactDexWriter::Write(DexContainer* output, std::string* error_msg) { // Write the map list. if (compute_offsets_) { data_stream->AlignTo(SectionAlignment(DexFile::kDexTypeMapList)); - collection.SetMapListOffset(data_stream->Tell()); + header_->SetMapListOffset(data_stream->Tell()); } else { - data_stream->Seek(collection.MapListOffset()); + data_stream->Seek(header_->MapListOffset()); } // Map items are included in the data section. GenerateAndWriteMapItems(data_stream); // Write link data if it exists. - const std::vector<uint8_t>& link_data = collection.LinkData(); + const std::vector<uint8_t>& link_data = header_->LinkData(); if (link_data.size() > 0) { CHECK_EQ(header_->LinkSize(), static_cast<uint32_t>(link_data.size())); if (compute_offsets_) { diff --git a/dexlayout/dex_ir.cc b/dexlayout/dex_ir.cc index 15e3baf18a..3917847ea7 100644 --- a/dexlayout/dex_ir.cc +++ b/dexlayout/dex_ir.cc @@ -30,873 +30,11 @@ namespace art { namespace dex_ir { -static uint64_t ReadVarWidth(const uint8_t** data, uint8_t length, bool sign_extend) { - uint64_t value = 0; - for (uint32_t i = 0; i <= length; i++) { - value |= static_cast<uint64_t>(*(*data)++) << (i * 8); - } - if (sign_extend) { - int shift = (7 - length) * 8; - return (static_cast<int64_t>(value) << shift) >> shift; - } - return value; -} - -static uint32_t GetDebugInfoStreamSize(const uint8_t* debug_info_stream) { - const uint8_t* stream = debug_info_stream; - DecodeUnsignedLeb128(&stream); // line_start - uint32_t parameters_size = DecodeUnsignedLeb128(&stream); - for (uint32_t i = 0; i < parameters_size; ++i) { - DecodeUnsignedLeb128P1(&stream); // Parameter name. - } - - for (;;) { - uint8_t opcode = *stream++; - switch (opcode) { - case DexFile::DBG_END_SEQUENCE: - return stream - debug_info_stream; // end of stream. 
- case DexFile::DBG_ADVANCE_PC: - DecodeUnsignedLeb128(&stream); // addr_diff - break; - case DexFile::DBG_ADVANCE_LINE: - DecodeSignedLeb128(&stream); // line_diff - break; - case DexFile::DBG_START_LOCAL: - DecodeUnsignedLeb128(&stream); // register_num - DecodeUnsignedLeb128P1(&stream); // name_idx - DecodeUnsignedLeb128P1(&stream); // type_idx - break; - case DexFile::DBG_START_LOCAL_EXTENDED: - DecodeUnsignedLeb128(&stream); // register_num - DecodeUnsignedLeb128P1(&stream); // name_idx - DecodeUnsignedLeb128P1(&stream); // type_idx - DecodeUnsignedLeb128P1(&stream); // sig_idx - break; - case DexFile::DBG_END_LOCAL: - case DexFile::DBG_RESTART_LOCAL: - DecodeUnsignedLeb128(&stream); // register_num - break; - case DexFile::DBG_SET_PROLOGUE_END: - case DexFile::DBG_SET_EPILOGUE_BEGIN: - break; - case DexFile::DBG_SET_FILE: { - DecodeUnsignedLeb128P1(&stream); // name_idx - break; - } - default: { - break; - } - } - } -} - -static bool GetIdFromInstruction(Collections& collections, - const Instruction* dec_insn, - std::vector<TypeId*>* type_ids, - std::vector<StringId*>* string_ids, - std::vector<MethodId*>* method_ids, - std::vector<FieldId*>* field_ids) { - // Determine index and width of the string. - uint32_t index = 0; - switch (Instruction::FormatOf(dec_insn->Opcode())) { - // SOME NOT SUPPORTED: - // case Instruction::k20bc: - case Instruction::k21c: - case Instruction::k35c: - // case Instruction::k35ms: - case Instruction::k3rc: - // case Instruction::k3rms: - // case Instruction::k35mi: - // case Instruction::k3rmi: - case Instruction::k45cc: - case Instruction::k4rcc: - index = dec_insn->VRegB(); - break; - case Instruction::k31c: - index = dec_insn->VRegB(); - break; - case Instruction::k22c: - // case Instruction::k22cs: - index = dec_insn->VRegC(); - break; - default: - break; - } // switch - - // Determine index type, and add reference to the appropriate collection. - switch (Instruction::IndexTypeOf(dec_insn->Opcode())) { - case Instruction::kIndexTypeRef: - if (index < collections.TypeIdsSize()) { - type_ids->push_back(collections.GetTypeId(index)); - return true; - } - break; - case Instruction::kIndexStringRef: - if (index < collections.StringIdsSize()) { - string_ids->push_back(collections.GetStringId(index)); - return true; - } - break; - case Instruction::kIndexMethodRef: - case Instruction::kIndexMethodAndProtoRef: - if (index < collections.MethodIdsSize()) { - method_ids->push_back(collections.GetMethodId(index)); - return true; - } - break; - case Instruction::kIndexFieldRef: - if (index < collections.FieldIdsSize()) { - field_ids->push_back(collections.GetFieldId(index)); - return true; - } - break; - case Instruction::kIndexUnknown: - case Instruction::kIndexNone: - case Instruction::kIndexVtableOffset: - case Instruction::kIndexFieldOffset: - default: - break; - } // switch - return false; -} - -/* - * Get all the types, strings, methods, and fields referred to from bytecode. - */ -static bool GetIdsFromByteCode(Collections& collections, - const CodeItem* code, - std::vector<TypeId*>* type_ids, - std::vector<StringId*>* string_ids, - std::vector<MethodId*>* method_ids, - std::vector<FieldId*>* field_ids) { - bool has_id = false; - IterationRange<DexInstructionIterator> instructions = code->Instructions(); - SafeDexInstructionIterator it(instructions.begin(), instructions.end()); - for (; !it.IsErrorState() && it < instructions.end(); ++it) { - // In case the instruction goes past the end of the code item, make sure to not process it. 
- SafeDexInstructionIterator next = it; - ++next; - if (next.IsErrorState()) { - break; - } - has_id |= GetIdFromInstruction(collections, - &it.Inst(), - type_ids, - string_ids, - method_ids, - field_ids); - } // for - return has_id; -} - -EncodedValue* Collections::ReadEncodedValue(const DexFile& dex_file, const uint8_t** data) { - const uint8_t encoded_value = *(*data)++; - const uint8_t type = encoded_value & 0x1f; - EncodedValue* item = new EncodedValue(type); - ReadEncodedValue(dex_file, data, type, encoded_value >> 5, item); - return item; -} - -EncodedValue* Collections::ReadEncodedValue(const DexFile& dex_file, - const uint8_t** data, - uint8_t type, - uint8_t length) { - EncodedValue* item = new EncodedValue(type); - ReadEncodedValue(dex_file, data, type, length, item); - return item; -} - -void Collections::ReadEncodedValue(const DexFile& dex_file, - const uint8_t** data, - uint8_t type, - uint8_t length, - EncodedValue* item) { - switch (type) { - case DexFile::kDexAnnotationByte: - item->SetByte(static_cast<int8_t>(ReadVarWidth(data, length, false))); - break; - case DexFile::kDexAnnotationShort: - item->SetShort(static_cast<int16_t>(ReadVarWidth(data, length, true))); - break; - case DexFile::kDexAnnotationChar: - item->SetChar(static_cast<uint16_t>(ReadVarWidth(data, length, false))); - break; - case DexFile::kDexAnnotationInt: - item->SetInt(static_cast<int32_t>(ReadVarWidth(data, length, true))); - break; - case DexFile::kDexAnnotationLong: - item->SetLong(static_cast<int64_t>(ReadVarWidth(data, length, true))); - break; - case DexFile::kDexAnnotationFloat: { - // Fill on right. - union { - float f; - uint32_t data; - } conv; - conv.data = static_cast<uint32_t>(ReadVarWidth(data, length, false)) << (3 - length) * 8; - item->SetFloat(conv.f); - break; - } - case DexFile::kDexAnnotationDouble: { - // Fill on right. - union { - double d; - uint64_t data; - } conv; - conv.data = ReadVarWidth(data, length, false) << (7 - length) * 8; - item->SetDouble(conv.d); - break; - } - case DexFile::kDexAnnotationMethodType: { - const uint32_t proto_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); - item->SetProtoId(GetProtoId(proto_index)); - break; - } - case DexFile::kDexAnnotationMethodHandle: { - const uint32_t method_handle_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); - item->SetMethodHandle(GetMethodHandle(method_handle_index)); - break; - } - case DexFile::kDexAnnotationString: { - const uint32_t string_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); - item->SetStringId(GetStringId(string_index)); - break; - } - case DexFile::kDexAnnotationType: { - const uint32_t string_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); - item->SetTypeId(GetTypeId(string_index)); - break; - } - case DexFile::kDexAnnotationField: - case DexFile::kDexAnnotationEnum: { - const uint32_t field_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); - item->SetFieldId(GetFieldId(field_index)); - break; - } - case DexFile::kDexAnnotationMethod: { - const uint32_t method_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); - item->SetMethodId(GetMethodId(method_index)); - break; - } - case DexFile::kDexAnnotationArray: { - EncodedValueVector* values = new EncodedValueVector(); - const uint32_t offset = *data - dex_file.DataBegin(); - const uint32_t size = DecodeUnsignedLeb128(data); - // Decode all elements. 
- for (uint32_t i = 0; i < size; i++) { - values->push_back(std::unique_ptr<EncodedValue>(ReadEncodedValue(dex_file, data))); - } - EncodedArrayItem* array_item = new EncodedArrayItem(values); - if (eagerly_assign_offsets_) { - array_item->SetOffset(offset); - } - item->SetEncodedArray(array_item); - break; - } - case DexFile::kDexAnnotationAnnotation: { - AnnotationElementVector* elements = new AnnotationElementVector(); - const uint32_t type_idx = DecodeUnsignedLeb128(data); - const uint32_t size = DecodeUnsignedLeb128(data); - // Decode all name=value pairs. - for (uint32_t i = 0; i < size; i++) { - const uint32_t name_index = DecodeUnsignedLeb128(data); - elements->push_back(std::unique_ptr<AnnotationElement>( - new AnnotationElement(GetStringId(name_index), ReadEncodedValue(dex_file, data)))); - } - item->SetEncodedAnnotation(new EncodedAnnotation(GetTypeId(type_idx), elements)); - break; - } - case DexFile::kDexAnnotationNull: - break; - case DexFile::kDexAnnotationBoolean: - item->SetBoolean(length != 0); - break; - default: - break; - } -} - -void Collections::CreateStringId(const DexFile& dex_file, uint32_t i) { - const DexFile::StringId& disk_string_id = dex_file.GetStringId(dex::StringIndex(i)); - StringData* string_data = CreateAndAddItem(string_datas_map_, - string_datas_, - disk_string_id.string_data_off_, - dex_file.GetStringData(disk_string_id)); - CreateAndAddIndexedItem(string_ids_, - StringIdsOffset() + i * StringId::ItemSize(), - i, - string_data); -} - -void Collections::CreateTypeId(const DexFile& dex_file, uint32_t i) { - const DexFile::TypeId& disk_type_id = dex_file.GetTypeId(dex::TypeIndex(i)); - CreateAndAddIndexedItem(type_ids_, - TypeIdsOffset() + i * TypeId::ItemSize(), - i, - GetStringId(disk_type_id.descriptor_idx_.index_)); -} - -void Collections::CreateProtoId(const DexFile& dex_file, uint32_t i) { - const DexFile::ProtoId& disk_proto_id = dex_file.GetProtoId(dex::ProtoIndex(i)); - const DexFile::TypeList* type_list = dex_file.GetProtoParameters(disk_proto_id); - TypeList* parameter_type_list = CreateTypeList(type_list, disk_proto_id.parameters_off_); - - CreateAndAddIndexedItem(proto_ids_, - ProtoIdsOffset() + i * ProtoId::ItemSize(), - i, - GetStringId(disk_proto_id.shorty_idx_.index_), - GetTypeId(disk_proto_id.return_type_idx_.index_), - parameter_type_list); -} - -void Collections::CreateFieldId(const DexFile& dex_file, uint32_t i) { - const DexFile::FieldId& disk_field_id = dex_file.GetFieldId(i); - CreateAndAddIndexedItem(field_ids_, - FieldIdsOffset() + i * FieldId::ItemSize(), - i, - GetTypeId(disk_field_id.class_idx_.index_), - GetTypeId(disk_field_id.type_idx_.index_), - GetStringId(disk_field_id.name_idx_.index_)); -} - -void Collections::CreateMethodId(const DexFile& dex_file, uint32_t i) { - const DexFile::MethodId& disk_method_id = dex_file.GetMethodId(i); - CreateAndAddIndexedItem(method_ids_, - MethodIdsOffset() + i * MethodId::ItemSize(), - i, - GetTypeId(disk_method_id.class_idx_.index_), - GetProtoId(disk_method_id.proto_idx_.index_), - GetStringId(disk_method_id.name_idx_.index_)); -} - -void Collections::CreateClassDef(const DexFile& dex_file, uint32_t i) { - const DexFile::ClassDef& disk_class_def = dex_file.GetClassDef(i); - const TypeId* class_type = GetTypeId(disk_class_def.class_idx_.index_); - uint32_t access_flags = disk_class_def.access_flags_; - const TypeId* superclass = GetTypeIdOrNullPtr(disk_class_def.superclass_idx_.index_); - - const DexFile::TypeList* type_list = dex_file.GetInterfacesList(disk_class_def); - TypeList* 
interfaces_type_list = CreateTypeList(type_list, disk_class_def.interfaces_off_); - - const StringId* source_file = GetStringIdOrNullPtr(disk_class_def.source_file_idx_.index_); - // Annotations. - AnnotationsDirectoryItem* annotations = nullptr; - const DexFile::AnnotationsDirectoryItem* disk_annotations_directory_item = - dex_file.GetAnnotationsDirectory(disk_class_def); - if (disk_annotations_directory_item != nullptr) { - annotations = CreateAnnotationsDirectoryItem( - dex_file, disk_annotations_directory_item, disk_class_def.annotations_off_); - } - // Static field initializers. - const uint8_t* static_data = dex_file.GetEncodedStaticFieldValuesArray(disk_class_def); - EncodedArrayItem* static_values = - CreateEncodedArrayItem(dex_file, static_data, disk_class_def.static_values_off_); - ClassData* class_data = CreateClassData( - dex_file, dex_file.GetClassData(disk_class_def), disk_class_def.class_data_off_); - CreateAndAddIndexedItem(class_defs_, - ClassDefsOffset() + i * ClassDef::ItemSize(), - i, - class_type, - access_flags, - superclass, - interfaces_type_list, - source_file, - annotations, - static_values, - class_data); -} - -TypeList* Collections::CreateTypeList(const DexFile::TypeList* dex_type_list, uint32_t offset) { - if (dex_type_list == nullptr) { - return nullptr; - } - TypeList* type_list = type_lists_map_.GetExistingObject(offset); - if (type_list == nullptr) { - TypeIdVector* type_vector = new TypeIdVector(); - uint32_t size = dex_type_list->Size(); - for (uint32_t index = 0; index < size; ++index) { - type_vector->push_back(GetTypeId(dex_type_list->GetTypeItem(index).type_idx_.index_)); - } - type_list = CreateAndAddItem(type_lists_map_, type_lists_, offset, type_vector); - } - return type_list; -} - -EncodedArrayItem* Collections::CreateEncodedArrayItem(const DexFile& dex_file, - const uint8_t* static_data, - uint32_t offset) { - if (static_data == nullptr) { - return nullptr; - } - EncodedArrayItem* encoded_array_item = encoded_array_items_map_.GetExistingObject(offset); - if (encoded_array_item == nullptr) { - uint32_t size = DecodeUnsignedLeb128(&static_data); - EncodedValueVector* values = new EncodedValueVector(); - for (uint32_t i = 0; i < size; ++i) { - values->push_back(std::unique_ptr<EncodedValue>(ReadEncodedValue(dex_file, &static_data))); - } - // TODO: Calculate the size of the encoded array. - encoded_array_item = CreateAndAddItem(encoded_array_items_map_, - encoded_array_items_, - offset, - values); - } - return encoded_array_item; -} - -void Collections::AddAnnotationsFromMapListSection(const DexFile& dex_file, - uint32_t start_offset, - uint32_t count) { - uint32_t current_offset = start_offset; - for (size_t i = 0; i < count; ++i) { - // Annotation that we didn't process already, add it to the set. 
- const DexFile::AnnotationItem* annotation = dex_file.GetAnnotationItemAtOffset(current_offset); - AnnotationItem* annotation_item = CreateAnnotationItem(dex_file, annotation); - DCHECK(annotation_item != nullptr); - current_offset += annotation_item->GetSize(); - } -} - -AnnotationItem* Collections::CreateAnnotationItem(const DexFile& dex_file, - const DexFile::AnnotationItem* annotation) { - const uint8_t* const start_data = reinterpret_cast<const uint8_t*>(annotation); - const uint32_t offset = start_data - dex_file.DataBegin(); - AnnotationItem* annotation_item = annotation_items_map_.GetExistingObject(offset); - if (annotation_item == nullptr) { - uint8_t visibility = annotation->visibility_; - const uint8_t* annotation_data = annotation->annotation_; - std::unique_ptr<EncodedValue> encoded_value( - ReadEncodedValue(dex_file, &annotation_data, DexFile::kDexAnnotationAnnotation, 0)); - annotation_item = CreateAndAddItem(annotation_items_map_, - annotation_items_, - offset, - visibility, - encoded_value->ReleaseEncodedAnnotation()); - annotation_item->SetSize(annotation_data - start_data); - } - return annotation_item; -} - - -AnnotationSetItem* Collections::CreateAnnotationSetItem(const DexFile& dex_file, - const DexFile::AnnotationSetItem* disk_annotations_item, uint32_t offset) { - if (disk_annotations_item == nullptr || (disk_annotations_item->size_ == 0 && offset == 0)) { - return nullptr; - } - AnnotationSetItem* annotation_set_item = annotation_set_items_map_.GetExistingObject(offset); - if (annotation_set_item == nullptr) { - std::vector<AnnotationItem*>* items = new std::vector<AnnotationItem*>(); - for (uint32_t i = 0; i < disk_annotations_item->size_; ++i) { - const DexFile::AnnotationItem* annotation = - dex_file.GetAnnotationItem(disk_annotations_item, i); - if (annotation == nullptr) { - continue; - } - AnnotationItem* annotation_item = CreateAnnotationItem(dex_file, annotation); - items->push_back(annotation_item); - } - annotation_set_item = CreateAndAddItem(annotation_set_items_map_, - annotation_set_items_, - offset, - items); - } - return annotation_set_item; -} - -AnnotationsDirectoryItem* Collections::CreateAnnotationsDirectoryItem(const DexFile& dex_file, - const DexFile::AnnotationsDirectoryItem* disk_annotations_item, uint32_t offset) { - AnnotationsDirectoryItem* annotations_directory_item = - annotations_directory_items_map_.GetExistingObject(offset); - if (annotations_directory_item != nullptr) { - return annotations_directory_item; - } - const DexFile::AnnotationSetItem* class_set_item = - dex_file.GetClassAnnotationSet(disk_annotations_item); - AnnotationSetItem* class_annotation = nullptr; - if (class_set_item != nullptr) { - uint32_t item_offset = disk_annotations_item->class_annotations_off_; - class_annotation = CreateAnnotationSetItem(dex_file, class_set_item, item_offset); - } - const DexFile::FieldAnnotationsItem* fields = - dex_file.GetFieldAnnotations(disk_annotations_item); - FieldAnnotationVector* field_annotations = nullptr; - if (fields != nullptr) { - field_annotations = new FieldAnnotationVector(); - for (uint32_t i = 0; i < disk_annotations_item->fields_size_; ++i) { - FieldId* field_id = GetFieldId(fields[i].field_idx_); - const DexFile::AnnotationSetItem* field_set_item = - dex_file.GetFieldAnnotationSetItem(fields[i]); - uint32_t annotation_set_offset = fields[i].annotations_off_; - AnnotationSetItem* annotation_set_item = - CreateAnnotationSetItem(dex_file, field_set_item, annotation_set_offset); - 
field_annotations->push_back(std::unique_ptr<FieldAnnotation>( - new FieldAnnotation(field_id, annotation_set_item))); - } - } - const DexFile::MethodAnnotationsItem* methods = - dex_file.GetMethodAnnotations(disk_annotations_item); - MethodAnnotationVector* method_annotations = nullptr; - if (methods != nullptr) { - method_annotations = new MethodAnnotationVector(); - for (uint32_t i = 0; i < disk_annotations_item->methods_size_; ++i) { - MethodId* method_id = GetMethodId(methods[i].method_idx_); - const DexFile::AnnotationSetItem* method_set_item = - dex_file.GetMethodAnnotationSetItem(methods[i]); - uint32_t annotation_set_offset = methods[i].annotations_off_; - AnnotationSetItem* annotation_set_item = - CreateAnnotationSetItem(dex_file, method_set_item, annotation_set_offset); - method_annotations->push_back(std::unique_ptr<MethodAnnotation>( - new MethodAnnotation(method_id, annotation_set_item))); - } - } - const DexFile::ParameterAnnotationsItem* parameters = - dex_file.GetParameterAnnotations(disk_annotations_item); - ParameterAnnotationVector* parameter_annotations = nullptr; - if (parameters != nullptr) { - parameter_annotations = new ParameterAnnotationVector(); - for (uint32_t i = 0; i < disk_annotations_item->parameters_size_; ++i) { - MethodId* method_id = GetMethodId(parameters[i].method_idx_); - const DexFile::AnnotationSetRefList* list = - dex_file.GetParameterAnnotationSetRefList(¶meters[i]); - parameter_annotations->push_back(std::unique_ptr<ParameterAnnotation>( - GenerateParameterAnnotation(dex_file, method_id, list, parameters[i].annotations_off_))); - } - } - // TODO: Calculate the size of the annotations directory. - return CreateAndAddItem(annotations_directory_items_map_, - annotations_directory_items_, - offset, - class_annotation, - field_annotations, - method_annotations, - parameter_annotations); -} - -ParameterAnnotation* Collections::GenerateParameterAnnotation( - const DexFile& dex_file, MethodId* method_id, - const DexFile::AnnotationSetRefList* annotation_set_ref_list, uint32_t offset) { - AnnotationSetRefList* set_ref_list = annotation_set_ref_lists_map_.GetExistingObject(offset); - if (set_ref_list == nullptr) { - std::vector<AnnotationSetItem*>* annotations = new std::vector<AnnotationSetItem*>(); - for (uint32_t i = 0; i < annotation_set_ref_list->size_; ++i) { - const DexFile::AnnotationSetItem* annotation_set_item = - dex_file.GetSetRefItemItem(&annotation_set_ref_list->list_[i]); - uint32_t set_offset = annotation_set_ref_list->list_[i].annotations_off_; - annotations->push_back(CreateAnnotationSetItem(dex_file, annotation_set_item, set_offset)); - } - set_ref_list = CreateAndAddItem(annotation_set_ref_lists_map_, - annotation_set_ref_lists_, - offset, - annotations); - } - return new ParameterAnnotation(method_id, set_ref_list); -} - -CodeItem* Collections::DedupeOrCreateCodeItem(const DexFile& dex_file, - const DexFile::CodeItem* disk_code_item, - uint32_t offset, - uint32_t dex_method_index) { - if (disk_code_item == nullptr) { - return nullptr; - } - CodeItemDebugInfoAccessor accessor(dex_file, disk_code_item, dex_method_index); - const uint32_t debug_info_offset = accessor.DebugInfoOffset(); - - // Create the offsets pair and dedupe based on it. 
- std::pair<uint32_t, uint32_t> offsets_pair(offset, debug_info_offset); - auto existing = code_items_map_.find(offsets_pair); - if (existing != code_items_map_.end()) { - return existing->second; - } - - const uint8_t* debug_info_stream = dex_file.GetDebugInfoStream(debug_info_offset); - DebugInfoItem* debug_info = nullptr; - if (debug_info_stream != nullptr) { - debug_info = debug_info_items_map_.GetExistingObject(debug_info_offset); - if (debug_info == nullptr) { - uint32_t debug_info_size = GetDebugInfoStreamSize(debug_info_stream); - uint8_t* debug_info_buffer = new uint8_t[debug_info_size]; - memcpy(debug_info_buffer, debug_info_stream, debug_info_size); - debug_info = CreateAndAddItem(debug_info_items_map_, - debug_info_items_, - debug_info_offset, - debug_info_size, - debug_info_buffer); - } - } - - uint32_t insns_size = accessor.InsnsSizeInCodeUnits(); - uint16_t* insns = new uint16_t[insns_size]; - memcpy(insns, accessor.Insns(), insns_size * sizeof(uint16_t)); - - TryItemVector* tries = nullptr; - CatchHandlerVector* handler_list = nullptr; - if (accessor.TriesSize() > 0) { - tries = new TryItemVector(); - handler_list = new CatchHandlerVector(); - for (const DexFile::TryItem& disk_try_item : accessor.TryItems()) { - uint32_t start_addr = disk_try_item.start_addr_; - uint16_t insn_count = disk_try_item.insn_count_; - uint16_t handler_off = disk_try_item.handler_off_; - const CatchHandler* handlers = nullptr; - for (std::unique_ptr<const CatchHandler>& existing_handlers : *handler_list) { - if (handler_off == existing_handlers->GetListOffset()) { - handlers = existing_handlers.get(); - break; - } - } - if (handlers == nullptr) { - bool catch_all = false; - TypeAddrPairVector* addr_pairs = new TypeAddrPairVector(); - for (CatchHandlerIterator it(accessor, disk_try_item); it.HasNext(); it.Next()) { - const dex::TypeIndex type_index = it.GetHandlerTypeIndex(); - const TypeId* type_id = GetTypeIdOrNullPtr(type_index.index_); - catch_all |= type_id == nullptr; - addr_pairs->push_back(std::unique_ptr<const TypeAddrPair>( - new TypeAddrPair(type_id, it.GetHandlerAddress()))); - } - handlers = new CatchHandler(catch_all, handler_off, addr_pairs); - handler_list->push_back(std::unique_ptr<const CatchHandler>(handlers)); - } - TryItem* try_item = new TryItem(start_addr, insn_count, handlers); - tries->push_back(std::unique_ptr<const TryItem>(try_item)); - } - // Manually walk catch handlers list and add any missing handlers unreferenced by try items. 
- const uint8_t* handlers_base = accessor.GetCatchHandlerData(); - const uint8_t* handlers_data = handlers_base; - uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_data); - while (handlers_size > handler_list->size()) { - bool already_added = false; - uint16_t handler_off = handlers_data - handlers_base; - for (std::unique_ptr<const CatchHandler>& existing_handlers : *handler_list) { - if (handler_off == existing_handlers->GetListOffset()) { - already_added = true; - break; - } - } - int32_t size = DecodeSignedLeb128(&handlers_data); - bool has_catch_all = size <= 0; - if (has_catch_all) { - size = -size; - } - if (already_added) { - for (int32_t i = 0; i < size; i++) { - DecodeUnsignedLeb128(&handlers_data); - DecodeUnsignedLeb128(&handlers_data); - } - if (has_catch_all) { - DecodeUnsignedLeb128(&handlers_data); - } - continue; - } - TypeAddrPairVector* addr_pairs = new TypeAddrPairVector(); - for (int32_t i = 0; i < size; i++) { - const TypeId* type_id = GetTypeIdOrNullPtr(DecodeUnsignedLeb128(&handlers_data)); - uint32_t addr = DecodeUnsignedLeb128(&handlers_data); - addr_pairs->push_back( - std::unique_ptr<const TypeAddrPair>(new TypeAddrPair(type_id, addr))); - } - if (has_catch_all) { - uint32_t addr = DecodeUnsignedLeb128(&handlers_data); - addr_pairs->push_back( - std::unique_ptr<const TypeAddrPair>(new TypeAddrPair(nullptr, addr))); - } - const CatchHandler* handler = new CatchHandler(has_catch_all, handler_off, addr_pairs); - handler_list->push_back(std::unique_ptr<const CatchHandler>(handler)); - } - } - - uint32_t size = dex_file.GetCodeItemSize(*disk_code_item); - CodeItem* code_item = code_items_.CreateAndAddItem(accessor.RegistersSize(), - accessor.InsSize(), - accessor.OutsSize(), - debug_info, - insns_size, - insns, - tries, - handler_list); - code_item->SetSize(size); - - // Add the code item to the map. - DCHECK(!code_item->OffsetAssigned()); - if (eagerly_assign_offsets_) { - code_item->SetOffset(offset); - } - code_items_map_.emplace(offsets_pair, code_item); - - // Add "fixup" references to types, strings, methods, and fields. - // This is temporary, as we will probably want more detailed parsing of the - // instructions here. - std::vector<TypeId*> type_ids; - std::vector<StringId*> string_ids; - std::vector<MethodId*> method_ids; - std::vector<FieldId*> field_ids; - if (GetIdsFromByteCode(*this, - code_item, - /*out*/ &type_ids, - /*out*/ &string_ids, - /*out*/ &method_ids, - /*out*/ &field_ids)) { - CodeFixups* fixups = new CodeFixups(std::move(type_ids), - std::move(string_ids), - std::move(method_ids), - std::move(field_ids)); - code_item->SetCodeFixups(fixups); - } - - return code_item; -} - -MethodItem Collections::GenerateMethodItem(const DexFile& dex_file, ClassDataItemIterator& cdii) { - MethodId* method_id = GetMethodId(cdii.GetMemberIndex()); - uint32_t access_flags = cdii.GetRawMemberAccessFlags(); - const DexFile::CodeItem* disk_code_item = cdii.GetMethodCodeItem(); - // Temporary hack to prevent incorrectly deduping code items if they have the same offset since - // they may have different debug info streams. 
- CodeItem* code_item = DedupeOrCreateCodeItem(dex_file, - disk_code_item, - cdii.GetMethodCodeItemOffset(), - cdii.GetMemberIndex()); - return MethodItem(access_flags, method_id, code_item); -} - -ClassData* Collections::CreateClassData( - const DexFile& dex_file, const uint8_t* encoded_data, uint32_t offset) { - // Read the fields and methods defined by the class, resolving the circular reference from those - // to classes by setting class at the same time. - ClassData* class_data = class_datas_map_.GetExistingObject(offset); - if (class_data == nullptr && encoded_data != nullptr) { - ClassDataItemIterator cdii(dex_file, encoded_data); - // Static fields. - FieldItemVector* static_fields = new FieldItemVector(); - for (; cdii.HasNextStaticField(); cdii.Next()) { - FieldId* field_item = GetFieldId(cdii.GetMemberIndex()); - uint32_t access_flags = cdii.GetRawMemberAccessFlags(); - static_fields->emplace_back(access_flags, field_item); - } - // Instance fields. - FieldItemVector* instance_fields = new FieldItemVector(); - for (; cdii.HasNextInstanceField(); cdii.Next()) { - FieldId* field_item = GetFieldId(cdii.GetMemberIndex()); - uint32_t access_flags = cdii.GetRawMemberAccessFlags(); - instance_fields->emplace_back(access_flags, field_item); - } - // Direct methods. - MethodItemVector* direct_methods = new MethodItemVector(); - for (; cdii.HasNextDirectMethod(); cdii.Next()) { - direct_methods->push_back(GenerateMethodItem(dex_file, cdii)); - } - // Virtual methods. - MethodItemVector* virtual_methods = new MethodItemVector(); - for (; cdii.HasNextVirtualMethod(); cdii.Next()) { - virtual_methods->push_back(GenerateMethodItem(dex_file, cdii)); - } - class_data = CreateAndAddItem(class_datas_map_, - class_datas_, - offset, - static_fields, - instance_fields, - direct_methods, - virtual_methods); - class_data->SetSize(cdii.EndDataPointer() - encoded_data); - } - return class_data; -} - -void Collections::CreateCallSitesAndMethodHandles(const DexFile& dex_file) { - // Iterate through the map list and set the offset of the CallSiteIds and MethodHandleItems. - const DexFile::MapList* map = dex_file.GetMapList(); - for (uint32_t i = 0; i < map->size_; ++i) { - const DexFile::MapItem* item = map->list_ + i; - switch (item->type_) { - case DexFile::kDexTypeCallSiteIdItem: - SetCallSiteIdsOffset(item->offset_); - break; - case DexFile::kDexTypeMethodHandleItem: - SetMethodHandleItemsOffset(item->offset_); - break; - default: - break; - } - } - // Populate MethodHandleItems first (CallSiteIds may depend on them). - for (uint32_t i = 0; i < dex_file.NumMethodHandles(); i++) { - CreateMethodHandleItem(dex_file, i); - } - // Populate CallSiteIds. 
- for (uint32_t i = 0; i < dex_file.NumCallSiteIds(); i++) { - CreateCallSiteId(dex_file, i); - } -} - -void Collections::CreateCallSiteId(const DexFile& dex_file, uint32_t i) { - const DexFile::CallSiteIdItem& disk_call_site_id = dex_file.GetCallSiteId(i); - const uint8_t* disk_call_item_ptr = dex_file.DataBegin() + disk_call_site_id.data_off_; - EncodedArrayItem* call_site_item = - CreateEncodedArrayItem(dex_file, disk_call_item_ptr, disk_call_site_id.data_off_); - - CreateAndAddIndexedItem(call_site_ids_, - CallSiteIdsOffset() + i * CallSiteId::ItemSize(), - i, - call_site_item); -} - -void Collections::CreateMethodHandleItem(const DexFile& dex_file, uint32_t i) { - const DexFile::MethodHandleItem& disk_method_handle = dex_file.GetMethodHandle(i); - uint16_t index = disk_method_handle.field_or_method_idx_; - DexFile::MethodHandleType type = - static_cast<DexFile::MethodHandleType>(disk_method_handle.method_handle_type_); - bool is_invoke = type == DexFile::MethodHandleType::kInvokeStatic || - type == DexFile::MethodHandleType::kInvokeInstance || - type == DexFile::MethodHandleType::kInvokeConstructor || - type == DexFile::MethodHandleType::kInvokeDirect || - type == DexFile::MethodHandleType::kInvokeInterface; - static_assert(DexFile::MethodHandleType::kLast == DexFile::MethodHandleType::kInvokeInterface, - "Unexpected method handle types."); - IndexedItem* field_or_method_id; - if (is_invoke) { - field_or_method_id = GetMethodId(index); - } else { - field_or_method_id = GetFieldId(index); - } - CreateAndAddIndexedItem(method_handle_items_, - MethodHandleItemsOffset() + i * MethodHandleItem::ItemSize(), - i, - type, - field_or_method_id); -} - -void Collections::SortVectorsByMapOrder() { - string_datas_.SortByMapOrder(string_datas_map_.Collection()); - type_lists_.SortByMapOrder(type_lists_map_.Collection()); - encoded_array_items_.SortByMapOrder(encoded_array_items_map_.Collection()); - annotation_items_.SortByMapOrder(annotation_items_map_.Collection()); - annotation_set_items_.SortByMapOrder(annotation_set_items_map_.Collection()); - annotation_set_ref_lists_.SortByMapOrder(annotation_set_ref_lists_map_.Collection()); - annotations_directory_items_.SortByMapOrder(annotations_directory_items_map_.Collection()); - debug_info_items_.SortByMapOrder(debug_info_items_map_.Collection()); - code_items_.SortByMapOrder(code_items_map_); - class_datas_.SortByMapOrder(class_datas_map_.Collection()); -} - -void Collections::ClearMaps() { - string_datas_map_.Collection().clear(); - type_lists_map_.Collection().clear(); - encoded_array_items_map_.Collection().clear(); - annotation_items_map_.Collection().clear(); - annotation_set_items_map_.Collection().clear(); - annotation_set_ref_lists_map_.Collection().clear(); - annotations_directory_items_map_.Collection().clear(); - debug_info_items_map_.Collection().clear(); - code_items_map_.clear(); - class_datas_map_.Collection().clear(); -} - -static uint32_t HeaderOffset(const dex_ir::Collections& collections ATTRIBUTE_UNUSED) { +static uint32_t HeaderOffset(const dex_ir::Header* header ATTRIBUTE_UNUSED) { return 0; } -static uint32_t HeaderSize(const dex_ir::Collections& collections ATTRIBUTE_UNUSED) { +static uint32_t HeaderSize(const dex_ir::Header* header ATTRIBUTE_UNUSED) { // Size is in elements, so there is only one header. return 1; } @@ -907,9 +45,9 @@ struct FileSectionDescriptor { std::string name; uint16_t type; // A function that when applied to a collection object, gives the size of the section. 
- std::function<uint32_t(const dex_ir::Collections&)> size_fn; + std::function<uint32_t(dex_ir::Header*)> size_fn; // A function that when applied to a collection object, gives the offset of the section. - std::function<uint32_t(const dex_ir::Collections&)> offset_fn; + std::function<uint32_t(dex_ir::Header*)> offset_fn; }; static const FileSectionDescriptor kFileSectionDescriptors[] = { @@ -921,106 +59,105 @@ static const FileSectionDescriptor kFileSectionDescriptors[] = { }, { "StringId", DexFile::kDexTypeStringIdItem, - &dex_ir::Collections::StringIdsSize, - &dex_ir::Collections::StringIdsOffset + [](const dex_ir::Header* h) { return h->StringIds().Size(); }, + [](const dex_ir::Header* h) { return h->StringIds().GetOffset(); } }, { "TypeId", DexFile::kDexTypeTypeIdItem, - &dex_ir::Collections::TypeIdsSize, - &dex_ir::Collections::TypeIdsOffset + [](const dex_ir::Header* h) { return h->TypeIds().Size(); }, + [](const dex_ir::Header* h) { return h->TypeIds().GetOffset(); } }, { "ProtoId", DexFile::kDexTypeProtoIdItem, - &dex_ir::Collections::ProtoIdsSize, - &dex_ir::Collections::ProtoIdsOffset + [](const dex_ir::Header* h) { return h->ProtoIds().Size(); }, + [](const dex_ir::Header* h) { return h->ProtoIds().GetOffset(); } }, { "FieldId", DexFile::kDexTypeFieldIdItem, - &dex_ir::Collections::FieldIdsSize, - &dex_ir::Collections::FieldIdsOffset + [](const dex_ir::Header* h) { return h->FieldIds().Size(); }, + [](const dex_ir::Header* h) { return h->FieldIds().GetOffset(); } }, { "MethodId", DexFile::kDexTypeMethodIdItem, - &dex_ir::Collections::MethodIdsSize, - &dex_ir::Collections::MethodIdsOffset + [](const dex_ir::Header* h) { return h->MethodIds().Size(); }, + [](const dex_ir::Header* h) { return h->MethodIds().GetOffset(); } }, { "ClassDef", DexFile::kDexTypeClassDefItem, - &dex_ir::Collections::ClassDefsSize, - &dex_ir::Collections::ClassDefsOffset + [](const dex_ir::Header* h) { return h->ClassDefs().Size(); }, + [](const dex_ir::Header* h) { return h->ClassDefs().GetOffset(); } }, { "CallSiteId", DexFile::kDexTypeCallSiteIdItem, - &dex_ir::Collections::CallSiteIdsSize, - &dex_ir::Collections::CallSiteIdsOffset + [](const dex_ir::Header* h) { return h->CallSiteIds().Size(); }, + [](const dex_ir::Header* h) { return h->CallSiteIds().GetOffset(); } }, { "MethodHandle", DexFile::kDexTypeMethodHandleItem, - &dex_ir::Collections::MethodHandleItemsSize, - &dex_ir::Collections::MethodHandleItemsOffset + [](const dex_ir::Header* h) { return h->MethodHandleItems().Size(); }, + [](const dex_ir::Header* h) { return h->MethodHandleItems().GetOffset(); } }, { "StringData", DexFile::kDexTypeStringDataItem, - &dex_ir::Collections::StringDatasSize, - &dex_ir::Collections::StringDatasOffset + [](const dex_ir::Header* h) { return h->StringDatas().Size(); }, + [](const dex_ir::Header* h) { return h->StringDatas().GetOffset(); } }, { "TypeList", DexFile::kDexTypeTypeList, - &dex_ir::Collections::TypeListsSize, - &dex_ir::Collections::TypeListsOffset + [](const dex_ir::Header* h) { return h->TypeLists().Size(); }, + [](const dex_ir::Header* h) { return h->TypeLists().GetOffset(); } }, { "EncArr", DexFile::kDexTypeEncodedArrayItem, - &dex_ir::Collections::EncodedArrayItemsSize, - &dex_ir::Collections::EncodedArrayItemsOffset + [](const dex_ir::Header* h) { return h->EncodedArrayItems().Size(); }, + [](const dex_ir::Header* h) { return h->EncodedArrayItems().GetOffset(); } }, { "Annotation", DexFile::kDexTypeAnnotationItem, - &dex_ir::Collections::AnnotationItemsSize, - 
&dex_ir::Collections::AnnotationItemsOffset + [](const dex_ir::Header* h) { return h->AnnotationItems().Size(); }, + [](const dex_ir::Header* h) { return h->AnnotationItems().GetOffset(); } }, { "AnnoSet", DexFile::kDexTypeAnnotationSetItem, - &dex_ir::Collections::AnnotationSetItemsSize, - &dex_ir::Collections::AnnotationSetItemsOffset + [](const dex_ir::Header* h) { return h->AnnotationSetItems().Size(); }, + [](const dex_ir::Header* h) { return h->AnnotationSetItems().GetOffset(); } }, { "AnnoSetRL", DexFile::kDexTypeAnnotationSetRefList, - &dex_ir::Collections::AnnotationSetRefListsSize, - &dex_ir::Collections::AnnotationSetRefListsOffset + [](const dex_ir::Header* h) { return h->AnnotationSetRefLists().Size(); }, + [](const dex_ir::Header* h) { return h->AnnotationSetRefLists().GetOffset(); } }, { "AnnoDir", DexFile::kDexTypeAnnotationsDirectoryItem, - &dex_ir::Collections::AnnotationsDirectoryItemsSize, - &dex_ir::Collections::AnnotationsDirectoryItemsOffset + [](const dex_ir::Header* h) { return h->AnnotationsDirectoryItems().Size(); }, + [](const dex_ir::Header* h) { return h->AnnotationsDirectoryItems().GetOffset(); } }, { "DebugInfo", DexFile::kDexTypeDebugInfoItem, - &dex_ir::Collections::DebugInfoItemsSize, - &dex_ir::Collections::DebugInfoItemsOffset + [](const dex_ir::Header* h) { return h->DebugInfoItems().Size(); }, + [](const dex_ir::Header* h) { return h->DebugInfoItems().GetOffset(); } }, { "CodeItem", DexFile::kDexTypeCodeItem, - &dex_ir::Collections::CodeItemsSize, - &dex_ir::Collections::CodeItemsOffset + [](const dex_ir::Header* h) { return h->CodeItems().Size(); }, + [](const dex_ir::Header* h) { return h->CodeItems().GetOffset(); } }, { "ClassData", DexFile::kDexTypeClassDataItem, - &dex_ir::Collections::ClassDatasSize, - &dex_ir::Collections::ClassDatasOffset + [](const dex_ir::Header* h) { return h->ClassDatas().Size(); }, + [](const dex_ir::Header* h) { return h->ClassDatas().GetOffset(); } } }; std::vector<dex_ir::DexFileSection> GetSortedDexFileSections(dex_ir::Header* header, dex_ir::SortDirection direction) { - const dex_ir::Collections& collections = header->GetCollections(); std::vector<dex_ir::DexFileSection> sorted_sections; // Build the table that will map from offset to color for (const FileSectionDescriptor& s : kFileSectionDescriptors) { sorted_sections.push_back(dex_ir::DexFileSection(s.name, s.type, - s.size_fn(collections), - s.offset_fn(collections))); + s.size_fn(header), + s.offset_fn(header))); } // Sort by offset. std::sort(sorted_sections.begin(), diff --git a/dexlayout/dex_ir.h b/dexlayout/dex_ir.h index 54ff105820..9f355ba9e8 100644 --- a/dexlayout/dex_ir.h +++ b/dexlayout/dex_ir.h @@ -24,6 +24,7 @@ #include <map> #include <vector> +#include "base/iteration_range.h" #include "base/leb128.h" #include "base/stl_util.h" #include "dex/dex_file-inl.h" @@ -107,37 +108,153 @@ class AbstractDispatcher { DISALLOW_COPY_AND_ASSIGN(AbstractDispatcher); }; -// Collections become owners of the objects added by moving them into unique pointers. 
-template<class T> class CollectionBase { +template<class T> class Iterator : public std::iterator<std::random_access_iterator_tag, T> { public: - CollectionBase() = default; + using value_type = typename std::iterator<std::random_access_iterator_tag, T>::value_type; + using difference_type = + typename std::iterator<std::random_access_iterator_tag, value_type>::difference_type; + using pointer = typename std::iterator<std::random_access_iterator_tag, value_type>::pointer; + using reference = typename std::iterator<std::random_access_iterator_tag, value_type>::reference; + + Iterator(const Iterator&) = default; + Iterator(Iterator&&) = default; + Iterator& operator=(const Iterator&) = default; + Iterator& operator=(Iterator&&) = default; + + Iterator(const std::vector<T>& vector, + uint32_t position, + uint32_t iterator_end) + : vector_(&vector), + position_(position), + iterator_end_(iterator_end) { } + Iterator() : vector_(nullptr), position_(0U), iterator_end_(0U) { } + + bool IsValid() const { return position_ < iterator_end_; } + + bool operator==(const Iterator& rhs) const { return position_ == rhs.position_; } + bool operator!=(const Iterator& rhs) const { return !(*this == rhs); } + bool operator<(const Iterator& rhs) const { return position_ < rhs.position_; } + bool operator>(const Iterator& rhs) const { return rhs < *this; } + bool operator<=(const Iterator& rhs) const { return !(rhs < *this); } + bool operator>=(const Iterator& rhs) const { return !(*this < rhs); } + + Iterator& operator++() { // Value after modification. + ++position_; + return *this; + } - uint32_t GetOffset() const { - return offset_; + Iterator operator++(int) { + Iterator temp = *this; + ++position_; + return temp; + } + + Iterator& operator+=(difference_type delta) { + position_ += delta; + return *this; + } + + Iterator operator+(difference_type delta) const { + Iterator temp = *this; + temp += delta; + return temp; + } + + Iterator& operator--() { // Value after modification. + --position_; + return *this; + } + + Iterator operator--(int) { + Iterator temp = *this; + --position_; + return temp; } - void SetOffset(uint32_t new_offset) { - offset_ = new_offset; + + Iterator& operator-=(difference_type delta) { + position_ -= delta; + return *this; + } + + Iterator operator-(difference_type delta) const { + Iterator temp = *this; + temp -= delta; + return temp; + } + + difference_type operator-(const Iterator& rhs) { + return position_ - rhs.position_; + } + + reference operator*() const { + return const_cast<reference>((*vector_)[position_]); + } + + pointer operator->() const { + return const_cast<pointer>(&((*vector_)[position_])); + } + + reference operator[](difference_type n) const { + return (*vector_)[position_ + n]; } private: + const std::vector<T>* vector_; + uint32_t position_; + uint32_t iterator_end_; + + template <typename U> + friend bool operator<(const Iterator<U>& lhs, const Iterator<U>& rhs); +}; + +// Collections become owners of the objects added by moving them into unique pointers. +class CollectionBase { + public: + CollectionBase() = default; + virtual ~CollectionBase() { } + + uint32_t GetOffset() const { return offset_; } + void SetOffset(uint32_t new_offset) { offset_ = new_offset; } + virtual uint32_t Size() const { return 0U; } + + private: // Start out unassigned. 
uint32_t offset_ = 0u; DISALLOW_COPY_AND_ASSIGN(CollectionBase); }; -template<class T> class CollectionVector : public CollectionBase<T> { +template<class T> class CollectionVector : public CollectionBase { public: - using Vector = std::vector<std::unique_ptr<T>>; + using ElementType = std::unique_ptr<T>; + CollectionVector() { } explicit CollectionVector(size_t size) { // Preallocate so that assignment does not invalidate pointers into the vector. collection_.reserve(size); } + virtual ~CollectionVector() OVERRIDE { } - uint32_t Size() const { return collection_.size(); } - Vector& Collection() { return collection_; } - const Vector& Collection() const { return collection_; } + template<class... Args> + T* CreateAndAddItem(Args&&... args) { + T* object = new T(std::forward<Args>(args)...); + collection_.push_back(std::unique_ptr<T>(object)); + return object; + } + + virtual uint32_t Size() const OVERRIDE { return collection_.size(); } + + Iterator<ElementType> begin() const { return Iterator<ElementType>(collection_, 0U, Size()); } + Iterator<ElementType> end() const { return Iterator<ElementType>(collection_, Size(), Size()); } + + const ElementType& operator[](size_t index) const { + DCHECK_LT(index, Size()); + return collection_[index]; + } + ElementType& operator[](size_t index) { + DCHECK_LT(index, Size()); + return collection_[index]; + } // Sort the vector by copying pointers over. template <typename MapType> @@ -147,24 +264,16 @@ template<class T> class CollectionVector : public CollectionBase<T> { for (size_t i = 0; i < Size(); ++i) { // There are times when the array will temporarily contain the same pointer twice, doing the // release here sure there is no double free errors. - Collection()[i].release(); - Collection()[i].reset(it->second); + collection_[i].release(); + collection_[i].reset(it->second); ++it; } } protected: - Vector collection_; - - template<class... Args> - T* CreateAndAddItem(Args&&... args) { - T* object = new T(std::forward<Args>(args)...); - collection_.push_back(std::unique_ptr<T>(object)); - return object; - } + std::vector<ElementType> collection_; private: - friend class Collections; DISALLOW_COPY_AND_ASSIGN(CollectionVector); }; @@ -174,7 +283,6 @@ template<class T> class IndexedCollectionVector : public CollectionVector<T> { IndexedCollectionVector() = default; explicit IndexedCollectionVector(size_t size) : CollectionVector<T>(size) { } - private: template <class... Args> T* CreateAndAddIndexedItem(uint32_t index, Args&&... args) { T* object = CollectionVector<T>::CreateAndAddItem(std::forward<Args>(args)...); @@ -182,330 +290,13 @@ template<class T> class IndexedCollectionVector : public CollectionVector<T> { return object; } - T* GetElement(uint32_t index) { - DCHECK_LT(index, CollectionVector<T>::Size()); + T* operator[](size_t index) const { DCHECK_NE(CollectionVector<T>::collection_[index].get(), static_cast<T*>(nullptr)); return CollectionVector<T>::collection_[index].get(); } - friend class Collections; - DISALLOW_COPY_AND_ASSIGN(IndexedCollectionVector); -}; - -template<class T> class CollectionMap : public CollectionBase<T> { - public: - CollectionMap() = default; - - // Returns the existing item if it is already inserted, null otherwise. - T* GetExistingObject(uint32_t offset) { - auto it = collection_.find(offset); - return it != collection_.end() ? it->second : nullptr; - } - - // Lower case for template interop with std::map. 
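// Illustrative sketch (not part of the original change): with CreateAndAddItem(),
// Size(), operator[] and begin()/end() now exposed directly on CollectionVector
// and IndexedCollectionVector above, typical use looks like this hypothetical
// helper (assumes dex_ir.h, <memory>, <utility> and base/logging.h):
template <typename T, typename... Args>
T* AddAndCheck(dex_ir::CollectionVector<T>* items, Args&&... args) {
  T* added = items->CreateAndAddItem(std::forward<Args>(args)...);  // Vector takes ownership.
  DCHECK_EQ(added, (*items)[items->Size() - 1].get());  // The new item is reachable by index...
  for (const std::unique_ptr<T>& item : *items) {       // ...and through the new Iterator.
    DCHECK(item != nullptr);
  }
  return added;
}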
- uint32_t size() const { return collection_.size(); } - std::map<uint32_t, T*>& Collection() { return collection_; } - private: - std::map<uint32_t, T*> collection_; - - // CollectionMaps do not own the objects they contain, therefore AddItem is supported - // rather than CreateAndAddItem. - void AddItem(T* object, uint32_t offset) { - auto it = collection_.emplace(offset, object); - CHECK(it.second) << "CollectionMap already has an object with offset " << offset << " " - << " and address " << it.first->second; - } - - friend class Collections; - DISALLOW_COPY_AND_ASSIGN(CollectionMap); -}; - -class Collections { - public: - Collections() = default; - Collections(uint32_t num_string_ids, - uint32_t num_type_ids, - uint32_t num_proto_ids, - uint32_t num_field_ids, - uint32_t num_method_ids, - uint32_t num_class_defs) - : string_ids_(num_string_ids), - type_ids_(num_type_ids), - proto_ids_(num_proto_ids), - field_ids_(num_field_ids), - method_ids_(num_method_ids), - class_defs_(num_class_defs) { } - - IndexedCollectionVector<StringId>::Vector& StringIds() { return string_ids_.Collection(); } - IndexedCollectionVector<TypeId>::Vector& TypeIds() { return type_ids_.Collection(); } - IndexedCollectionVector<ProtoId>::Vector& ProtoIds() { return proto_ids_.Collection(); } - IndexedCollectionVector<FieldId>::Vector& FieldIds() { return field_ids_.Collection(); } - IndexedCollectionVector<MethodId>::Vector& MethodIds() { return method_ids_.Collection(); } - IndexedCollectionVector<ClassDef>::Vector& ClassDefs() { return class_defs_.Collection(); } - CollectionVector<CallSiteId>::Vector& CallSiteIds() { return call_site_ids_.Collection(); } - CollectionVector<MethodHandleItem>::Vector& MethodHandleItems() - { return method_handle_items_.Collection(); } - CollectionVector<StringData>::Vector& StringDatas() { return string_datas_.Collection(); } - CollectionVector<TypeList>::Vector& TypeLists() { return type_lists_.Collection(); } - CollectionVector<EncodedArrayItem>::Vector& EncodedArrayItems() - { return encoded_array_items_.Collection(); } - CollectionVector<AnnotationItem>::Vector& AnnotationItems() - { return annotation_items_.Collection(); } - CollectionVector<AnnotationSetItem>::Vector& AnnotationSetItems() - { return annotation_set_items_.Collection(); } - CollectionVector<AnnotationSetRefList>::Vector& AnnotationSetRefLists() - { return annotation_set_ref_lists_.Collection(); } - CollectionVector<AnnotationsDirectoryItem>::Vector& AnnotationsDirectoryItems() - { return annotations_directory_items_.Collection(); } - CollectionVector<DebugInfoItem>::Vector& DebugInfoItems() - { return debug_info_items_.Collection(); } - CollectionVector<CodeItem>::Vector& CodeItems() { return code_items_.Collection(); } - CollectionVector<ClassData>::Vector& ClassDatas() { return class_datas_.Collection(); } - - const CollectionVector<ClassDef>::Vector& ClassDefs() const { return class_defs_.Collection(); } - - void CreateStringId(const DexFile& dex_file, uint32_t i); - void CreateTypeId(const DexFile& dex_file, uint32_t i); - void CreateProtoId(const DexFile& dex_file, uint32_t i); - void CreateFieldId(const DexFile& dex_file, uint32_t i); - void CreateMethodId(const DexFile& dex_file, uint32_t i); - void CreateClassDef(const DexFile& dex_file, uint32_t i); - void CreateCallSiteId(const DexFile& dex_file, uint32_t i); - void CreateMethodHandleItem(const DexFile& dex_file, uint32_t i); - - void CreateCallSitesAndMethodHandles(const DexFile& dex_file); - - TypeList* CreateTypeList(const DexFile::TypeList* 
type_list, uint32_t offset); - EncodedArrayItem* CreateEncodedArrayItem(const DexFile& dex_file, - const uint8_t* static_data, - uint32_t offset); - AnnotationItem* CreateAnnotationItem(const DexFile& dex_file, - const DexFile::AnnotationItem* annotation); - AnnotationSetItem* CreateAnnotationSetItem(const DexFile& dex_file, - const DexFile::AnnotationSetItem* disk_annotations_item, uint32_t offset); - AnnotationsDirectoryItem* CreateAnnotationsDirectoryItem(const DexFile& dex_file, - const DexFile::AnnotationsDirectoryItem* disk_annotations_item, uint32_t offset); - CodeItem* DedupeOrCreateCodeItem(const DexFile& dex_file, - const DexFile::CodeItem* disk_code_item, - uint32_t offset, - uint32_t dex_method_index); - ClassData* CreateClassData(const DexFile& dex_file, const uint8_t* encoded_data, uint32_t offset); - void AddAnnotationsFromMapListSection(const DexFile& dex_file, - uint32_t start_offset, - uint32_t count); - - StringId* GetStringId(uint32_t index) { - return string_ids_.GetElement(index); - } - TypeId* GetTypeId(uint32_t index) { - return type_ids_.GetElement(index); - } - ProtoId* GetProtoId(uint32_t index) { - return proto_ids_.GetElement(index); - } - FieldId* GetFieldId(uint32_t index) { - return field_ids_.GetElement(index); - } - MethodId* GetMethodId(uint32_t index) { - return method_ids_.GetElement(index); - } - ClassDef* GetClassDef(uint32_t index) { - return class_defs_.GetElement(index); - } - CallSiteId* GetCallSiteId(uint32_t index) { - CHECK_LT(index, CallSiteIdsSize()); - return CallSiteIds()[index].get(); - } - MethodHandleItem* GetMethodHandle(uint32_t index) { - CHECK_LT(index, MethodHandleItemsSize()); - return MethodHandleItems()[index].get(); - } - - StringId* GetStringIdOrNullPtr(uint32_t index) { - return index == dex::kDexNoIndex ? nullptr : GetStringId(index); - } - TypeId* GetTypeIdOrNullPtr(uint16_t index) { - return index == DexFile::kDexNoIndex16 ? 
nullptr : GetTypeId(index); - } - - uint32_t StringIdsOffset() const { return string_ids_.GetOffset(); } - uint32_t TypeIdsOffset() const { return type_ids_.GetOffset(); } - uint32_t ProtoIdsOffset() const { return proto_ids_.GetOffset(); } - uint32_t FieldIdsOffset() const { return field_ids_.GetOffset(); } - uint32_t MethodIdsOffset() const { return method_ids_.GetOffset(); } - uint32_t ClassDefsOffset() const { return class_defs_.GetOffset(); } - uint32_t CallSiteIdsOffset() const { return call_site_ids_.GetOffset(); } - uint32_t MethodHandleItemsOffset() const { return method_handle_items_.GetOffset(); } - uint32_t StringDatasOffset() const { return string_datas_.GetOffset(); } - uint32_t TypeListsOffset() const { return type_lists_.GetOffset(); } - uint32_t EncodedArrayItemsOffset() const { return encoded_array_items_.GetOffset(); } - uint32_t AnnotationItemsOffset() const { return annotation_items_.GetOffset(); } - uint32_t AnnotationSetItemsOffset() const { return annotation_set_items_.GetOffset(); } - uint32_t AnnotationSetRefListsOffset() const { return annotation_set_ref_lists_.GetOffset(); } - uint32_t AnnotationsDirectoryItemsOffset() const - { return annotations_directory_items_.GetOffset(); } - uint32_t DebugInfoItemsOffset() const { return debug_info_items_.GetOffset(); } - uint32_t CodeItemsOffset() const { return code_items_.GetOffset(); } - uint32_t ClassDatasOffset() const { return class_datas_.GetOffset(); } - uint32_t MapListOffset() const { return map_list_offset_; } - - void SetStringIdsOffset(uint32_t new_offset) { string_ids_.SetOffset(new_offset); } - void SetTypeIdsOffset(uint32_t new_offset) { type_ids_.SetOffset(new_offset); } - void SetProtoIdsOffset(uint32_t new_offset) { proto_ids_.SetOffset(new_offset); } - void SetFieldIdsOffset(uint32_t new_offset) { field_ids_.SetOffset(new_offset); } - void SetMethodIdsOffset(uint32_t new_offset) { method_ids_.SetOffset(new_offset); } - void SetClassDefsOffset(uint32_t new_offset) { class_defs_.SetOffset(new_offset); } - void SetCallSiteIdsOffset(uint32_t new_offset) { call_site_ids_.SetOffset(new_offset); } - void SetMethodHandleItemsOffset(uint32_t new_offset) - { method_handle_items_.SetOffset(new_offset); } - void SetStringDatasOffset(uint32_t new_offset) { string_datas_.SetOffset(new_offset); } - void SetTypeListsOffset(uint32_t new_offset) { type_lists_.SetOffset(new_offset); } - void SetEncodedArrayItemsOffset(uint32_t new_offset) - { encoded_array_items_.SetOffset(new_offset); } - void SetAnnotationItemsOffset(uint32_t new_offset) { annotation_items_.SetOffset(new_offset); } - void SetAnnotationSetItemsOffset(uint32_t new_offset) - { annotation_set_items_.SetOffset(new_offset); } - void SetAnnotationSetRefListsOffset(uint32_t new_offset) - { annotation_set_ref_lists_.SetOffset(new_offset); } - void SetAnnotationsDirectoryItemsOffset(uint32_t new_offset) - { annotations_directory_items_.SetOffset(new_offset); } - void SetDebugInfoItemsOffset(uint32_t new_offset) { debug_info_items_.SetOffset(new_offset); } - void SetCodeItemsOffset(uint32_t new_offset) { code_items_.SetOffset(new_offset); } - void SetClassDatasOffset(uint32_t new_offset) { class_datas_.SetOffset(new_offset); } - void SetMapListOffset(uint32_t new_offset) { map_list_offset_ = new_offset; } - - uint32_t StringIdsSize() const { return string_ids_.Size(); } - uint32_t TypeIdsSize() const { return type_ids_.Size(); } - uint32_t ProtoIdsSize() const { return proto_ids_.Size(); } - uint32_t FieldIdsSize() const { return field_ids_.Size(); } - uint32_t 
MethodIdsSize() const { return method_ids_.Size(); } - uint32_t ClassDefsSize() const { return class_defs_.Size(); } - uint32_t CallSiteIdsSize() const { return call_site_ids_.Size(); } - uint32_t MethodHandleItemsSize() const { return method_handle_items_.Size(); } - uint32_t StringDatasSize() const { return string_datas_.Size(); } - uint32_t TypeListsSize() const { return type_lists_.Size(); } - uint32_t EncodedArrayItemsSize() const { return encoded_array_items_.Size(); } - uint32_t AnnotationItemsSize() const { return annotation_items_.Size(); } - uint32_t AnnotationSetItemsSize() const { return annotation_set_items_.Size(); } - uint32_t AnnotationSetRefListsSize() const { return annotation_set_ref_lists_.Size(); } - uint32_t AnnotationsDirectoryItemsSize() const { return annotations_directory_items_.Size(); } - uint32_t DebugInfoItemsSize() const { return debug_info_items_.Size(); } - uint32_t CodeItemsSize() const { return code_items_.Size(); } - uint32_t ClassDatasSize() const { return class_datas_.Size(); } - - // Sort the vectors buy map order (same order that was used in the input file). - void SortVectorsByMapOrder(); - // Empty the maps, which are only used for IR construction. - void ClearMaps(); - - template <typename Type, class... Args> - Type* CreateAndAddItem(CollectionMap<Type>& map, - CollectionVector<Type>& vector, - uint32_t offset, - Args&&... args) { - Type* item = vector.CreateAndAddItem(std::forward<Args>(args)...); - DCHECK(!map.GetExistingObject(offset)); - DCHECK(!item->OffsetAssigned()); - if (eagerly_assign_offsets_) { - item->SetOffset(offset); - } - map.AddItem(item, offset); - return item; - } - - template <typename Type, class... Args> - Type* CreateAndAddIndexedItem(IndexedCollectionVector<Type>& vector, - uint32_t offset, - uint32_t index, - Args&&... args) { - Type* item = vector.CreateAndAddIndexedItem(index, std::forward<Args>(args)...); - DCHECK(!item->OffsetAssigned()); - if (eagerly_assign_offsets_) { - item->SetOffset(offset); - } - return item; - } - - void SetEagerlyAssignOffsets(bool eagerly_assign_offsets) { - eagerly_assign_offsets_ = eagerly_assign_offsets; - } - - void SetLinkData(std::vector<uint8_t>&& link_data) { - link_data_ = std::move(link_data); - } - - const std::vector<uint8_t>& LinkData() const { - return link_data_; - } - - private: - EncodedValue* ReadEncodedValue(const DexFile& dex_file, const uint8_t** data); - EncodedValue* ReadEncodedValue(const DexFile& dex_file, - const uint8_t** data, - uint8_t type, - uint8_t length); - void ReadEncodedValue(const DexFile& dex_file, - const uint8_t** data, - uint8_t type, - uint8_t length, - EncodedValue* item); - - ParameterAnnotation* GenerateParameterAnnotation(const DexFile& dex_file, MethodId* method_id, - const DexFile::AnnotationSetRefList* annotation_set_ref_list, uint32_t offset); - MethodItem GenerateMethodItem(const DexFile& dex_file, ClassDataItemIterator& cdii); - - // Collection vectors own the IR data. 
- IndexedCollectionVector<StringId> string_ids_; - IndexedCollectionVector<TypeId> type_ids_; - IndexedCollectionVector<ProtoId> proto_ids_; - IndexedCollectionVector<FieldId> field_ids_; - IndexedCollectionVector<MethodId> method_ids_; - IndexedCollectionVector<ClassDef> class_defs_; - IndexedCollectionVector<CallSiteId> call_site_ids_; - IndexedCollectionVector<MethodHandleItem> method_handle_items_; - IndexedCollectionVector<StringData> string_datas_; - IndexedCollectionVector<TypeList> type_lists_; - IndexedCollectionVector<EncodedArrayItem> encoded_array_items_; - IndexedCollectionVector<AnnotationItem> annotation_items_; - IndexedCollectionVector<AnnotationSetItem> annotation_set_items_; - IndexedCollectionVector<AnnotationSetRefList> annotation_set_ref_lists_; - IndexedCollectionVector<AnnotationsDirectoryItem> annotations_directory_items_; - // The order of the vectors controls the layout of the output file by index order, to change the - // layout just sort the vector. Note that you may only change the order of the non indexed vectors - // below. Indexed vectors are accessed by indices in other places, changing the sorting order will - // invalidate the existing indices and is not currently supported. - CollectionVector<DebugInfoItem> debug_info_items_; - CollectionVector<CodeItem> code_items_; - CollectionVector<ClassData> class_datas_; - - // Note that the maps do not have ownership, the vectors do. - // TODO: These maps should only be required for building the IR and should be put in a separate - // IR builder class. - CollectionMap<StringData> string_datas_map_; - CollectionMap<TypeList> type_lists_map_; - CollectionMap<EncodedArrayItem> encoded_array_items_map_; - CollectionMap<AnnotationItem> annotation_items_map_; - CollectionMap<AnnotationSetItem> annotation_set_items_map_; - CollectionMap<AnnotationSetRefList> annotation_set_ref_lists_map_; - CollectionMap<AnnotationsDirectoryItem> annotations_directory_items_map_; - CollectionMap<DebugInfoItem> debug_info_items_map_; - // Code item maps need to check both the debug info offset and debug info offset, do not use - // CollectionMap. - // First offset is the code item offset, second is the debug info offset. - std::map<std::pair<uint32_t, uint32_t>, CodeItem*> code_items_map_; - CollectionMap<ClassData> class_datas_map_; - - uint32_t map_list_offset_ = 0; - - // Link data. - std::vector<uint8_t> link_data_; - - // If we eagerly assign offsets during IR building or later after layout. Must be false if - // changing the layout is enabled. 
- bool eagerly_assign_offsets_; - - DISALLOW_COPY_AND_ASSIGN(Collections); + DISALLOW_COPY_AND_ASSIGN(IndexedCollectionVector); }; class Item { @@ -598,12 +389,12 @@ class Header : public Item { uint32_t num_class_defs) : Item(0, kHeaderItemSize), support_default_methods_(support_default_methods), - collections_(num_string_ids, - num_type_ids, - num_proto_ids, - num_field_ids, - num_method_ids, - num_class_defs) { + string_ids_(num_string_ids), + type_ids_(num_type_ids), + proto_ids_(num_proto_ids), + field_ids_(num_field_ids), + method_ids_(num_method_ids), + class_defs_(num_class_defs) { ConstructorHelper(magic, checksum, signature, @@ -641,7 +432,69 @@ class Header : public Item { void SetDataSize(uint32_t new_data_size) { data_size_ = new_data_size; } void SetDataOffset(uint32_t new_data_offset) { data_offset_ = new_data_offset; } - Collections& GetCollections() { return collections_; } + IndexedCollectionVector<StringId>& StringIds() { return string_ids_; } + const IndexedCollectionVector<StringId>& StringIds() const { return string_ids_; } + IndexedCollectionVector<TypeId>& TypeIds() { return type_ids_; } + const IndexedCollectionVector<TypeId>& TypeIds() const { return type_ids_; } + IndexedCollectionVector<ProtoId>& ProtoIds() { return proto_ids_; } + const IndexedCollectionVector<ProtoId>& ProtoIds() const { return proto_ids_; } + IndexedCollectionVector<FieldId>& FieldIds() { return field_ids_; } + const IndexedCollectionVector<FieldId>& FieldIds() const { return field_ids_; } + IndexedCollectionVector<MethodId>& MethodIds() { return method_ids_; } + const IndexedCollectionVector<MethodId>& MethodIds() const { return method_ids_; } + IndexedCollectionVector<ClassDef>& ClassDefs() { return class_defs_; } + const IndexedCollectionVector<ClassDef>& ClassDefs() const { return class_defs_; } + IndexedCollectionVector<CallSiteId>& CallSiteIds() { return call_site_ids_; } + const IndexedCollectionVector<CallSiteId>& CallSiteIds() const { return call_site_ids_; } + IndexedCollectionVector<MethodHandleItem>& MethodHandleItems() { return method_handle_items_; } + const IndexedCollectionVector<MethodHandleItem>& MethodHandleItems() const { + return method_handle_items_; + } + CollectionVector<StringData>& StringDatas() { return string_datas_; } + const CollectionVector<StringData>& StringDatas() const { return string_datas_; } + CollectionVector<TypeList>& TypeLists() { return type_lists_; } + const CollectionVector<TypeList>& TypeLists() const { return type_lists_; } + CollectionVector<EncodedArrayItem>& EncodedArrayItems() { return encoded_array_items_; } + const CollectionVector<EncodedArrayItem>& EncodedArrayItems() const { + return encoded_array_items_; + } + CollectionVector<AnnotationItem>& AnnotationItems() { return annotation_items_; } + const CollectionVector<AnnotationItem>& AnnotationItems() const { return annotation_items_; } + CollectionVector<AnnotationSetItem>& AnnotationSetItems() { return annotation_set_items_; } + const CollectionVector<AnnotationSetItem>& AnnotationSetItems() const { + return annotation_set_items_; + } + CollectionVector<AnnotationSetRefList>& AnnotationSetRefLists() { + return annotation_set_ref_lists_; + } + const CollectionVector<AnnotationSetRefList>& AnnotationSetRefLists() const { + return annotation_set_ref_lists_; + } + CollectionVector<AnnotationsDirectoryItem>& AnnotationsDirectoryItems() { + return annotations_directory_items_; + } + const CollectionVector<AnnotationsDirectoryItem>& AnnotationsDirectoryItems() const { + return 
annotations_directory_items_; + } + CollectionVector<DebugInfoItem>& DebugInfoItems() { return debug_info_items_; } + const CollectionVector<DebugInfoItem>& DebugInfoItems() const { return debug_info_items_; } + CollectionVector<CodeItem>& CodeItems() { return code_items_; } + const CollectionVector<CodeItem>& CodeItems() const { return code_items_; } + CollectionVector<ClassData>& ClassDatas() { return class_datas_; } + const CollectionVector<ClassData>& ClassDatas() const { return class_datas_; } + + StringId* GetStringIdOrNullPtr(uint32_t index) { + return index == dex::kDexNoIndex ? nullptr : StringIds()[index]; + } + TypeId* GetTypeIdOrNullPtr(uint16_t index) { + return index == DexFile::kDexNoIndex16 ? nullptr : TypeIds()[index]; + } + + uint32_t MapListOffset() const { return map_list_offset_; } + void SetMapListOffset(uint32_t new_offset) { map_list_offset_ = new_offset; } + + const std::vector<uint8_t>& LinkData() const { return link_data_; } + void SetLinkData(std::vector<uint8_t>&& link_data) { link_data_ = std::move(link_data); } void Accept(AbstractDispatcher* dispatch) { dispatch->Dispatch(this); } @@ -683,7 +536,35 @@ class Header : public Item { memcpy(magic_, magic, sizeof(magic_)); memcpy(signature_, signature, sizeof(signature_)); } - Collections collections_; + + // Collection vectors own the IR data. + IndexedCollectionVector<StringId> string_ids_; + IndexedCollectionVector<TypeId> type_ids_; + IndexedCollectionVector<ProtoId> proto_ids_; + IndexedCollectionVector<FieldId> field_ids_; + IndexedCollectionVector<MethodId> method_ids_; + IndexedCollectionVector<ClassDef> class_defs_; + IndexedCollectionVector<CallSiteId> call_site_ids_; + IndexedCollectionVector<MethodHandleItem> method_handle_items_; + IndexedCollectionVector<StringData> string_datas_; + IndexedCollectionVector<TypeList> type_lists_; + IndexedCollectionVector<EncodedArrayItem> encoded_array_items_; + IndexedCollectionVector<AnnotationItem> annotation_items_; + IndexedCollectionVector<AnnotationSetItem> annotation_set_items_; + IndexedCollectionVector<AnnotationSetRefList> annotation_set_ref_lists_; + IndexedCollectionVector<AnnotationsDirectoryItem> annotations_directory_items_; + // The order of the vectors controls the layout of the output file by index order, to change the + // layout just sort the vector. Note that you may only change the order of the non indexed vectors + // below. Indexed vectors are accessed by indices in other places, changing the sorting order will + // invalidate the existing indices and is not currently supported. + CollectionVector<DebugInfoItem> debug_info_items_; + CollectionVector<CodeItem> code_items_; + CollectionVector<ClassData> class_datas_; + + uint32_t map_list_offset_ = 0; + + // Link data. 
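// Illustrative sketch (not part of the original change): with the Collections
// class gone, sizes and offsets are read straight off the Header accessors above,
// e.g. in this hypothetical helper (assumes base/logging.h):
void PrintSectionStats(const dex_ir::Header* header) {
  LOG(INFO) << "string_ids: " << header->StringIds().Size()
            << " @ " << header->StringIds().GetOffset();
  LOG(INFO) << "code_items: " << header->CodeItems().Size()
            << " @ " << header->CodeItems().GetOffset();
  LOG(INFO) << "map_list @ " << header->MapListOffset();
}
// This mirrors the [](const dex_ir::Header* h) { ... } lambdas that the dexlayout
// section descriptors earlier in the patch switched to.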
+ std::vector<uint8_t> link_data_; DISALLOW_COPY_AND_ASSIGN(Header); }; diff --git a/dexlayout/dex_ir_builder.cc b/dexlayout/dex_ir_builder.cc index 9468f763d6..a04a2349c4 100644 --- a/dexlayout/dex_ir_builder.cc +++ b/dexlayout/dex_ir_builder.cc @@ -20,14 +20,226 @@ #include <vector> #include "dex_ir_builder.h" + +#include "dex/code_item_accessors-inl.h" +#include "dex/dex_file_exception_helpers.h" #include "dexlayout.h" namespace art { namespace dex_ir { -static void CheckAndSetRemainingOffsets(const DexFile& dex_file, - Collections* collections, - const Options& options); +static uint64_t ReadVarWidth(const uint8_t** data, uint8_t length, bool sign_extend) { + uint64_t value = 0; + for (uint32_t i = 0; i <= length; i++) { + value |= static_cast<uint64_t>(*(*data)++) << (i * 8); + } + if (sign_extend) { + int shift = (7 - length) * 8; + return (static_cast<int64_t>(value) << shift) >> shift; + } + return value; +} + +static uint32_t GetDebugInfoStreamSize(const uint8_t* debug_info_stream) { + const uint8_t* stream = debug_info_stream; + DecodeUnsignedLeb128(&stream); // line_start + uint32_t parameters_size = DecodeUnsignedLeb128(&stream); + for (uint32_t i = 0; i < parameters_size; ++i) { + DecodeUnsignedLeb128P1(&stream); // Parameter name. + } + + for (;;) { + uint8_t opcode = *stream++; + switch (opcode) { + case DexFile::DBG_END_SEQUENCE: + return stream - debug_info_stream; // end of stream. + case DexFile::DBG_ADVANCE_PC: + DecodeUnsignedLeb128(&stream); // addr_diff + break; + case DexFile::DBG_ADVANCE_LINE: + DecodeSignedLeb128(&stream); // line_diff + break; + case DexFile::DBG_START_LOCAL: + DecodeUnsignedLeb128(&stream); // register_num + DecodeUnsignedLeb128P1(&stream); // name_idx + DecodeUnsignedLeb128P1(&stream); // type_idx + break; + case DexFile::DBG_START_LOCAL_EXTENDED: + DecodeUnsignedLeb128(&stream); // register_num + DecodeUnsignedLeb128P1(&stream); // name_idx + DecodeUnsignedLeb128P1(&stream); // type_idx + DecodeUnsignedLeb128P1(&stream); // sig_idx + break; + case DexFile::DBG_END_LOCAL: + case DexFile::DBG_RESTART_LOCAL: + DecodeUnsignedLeb128(&stream); // register_num + break; + case DexFile::DBG_SET_PROLOGUE_END: + case DexFile::DBG_SET_EPILOGUE_BEGIN: + break; + case DexFile::DBG_SET_FILE: { + DecodeUnsignedLeb128P1(&stream); // name_idx + break; + } + default: { + break; + } + } + } +} + +template<class T> class CollectionMap : public CollectionBase { + public: + CollectionMap() = default; + virtual ~CollectionMap() OVERRIDE { } + + template <class... Args> + T* CreateAndAddItem(CollectionVector<T>& vector, + bool eagerly_assign_offsets, + uint32_t offset, + Args&&... args) { + T* item = vector.CreateAndAddItem(std::forward<Args>(args)...); + DCHECK(!GetExistingObject(offset)); + DCHECK(!item->OffsetAssigned()); + if (eagerly_assign_offsets) { + item->SetOffset(offset); + } + AddItem(item, offset); + return item; + } + + // Returns the existing item if it is already inserted, null otherwise. + T* GetExistingObject(uint32_t offset) { + auto it = collection_.find(offset); + return it != collection_.end() ? it->second : nullptr; + } + + // Lower case for template interop with std::map. + uint32_t size() const { return collection_.size(); } + std::map<uint32_t, T*>& Collection() { return collection_; } + + private: + std::map<uint32_t, T*> collection_; + + // CollectionMaps do not own the objects they contain, therefore AddItem is supported + // rather than CreateAndAddItem. 
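// Illustrative sketch (not part of the original change): the builder-local
// CollectionMap above dedupes IR nodes by file offset. The Create* methods further
// down all follow roughly this shape; the helper below is hypothetical, written as
// if it lived next to BuilderMaps inside namespace art::dex_ir in this file
// (StringData is constructible from the raw string data, as CreateStringId shows).
StringData* GetOrCreateStringData(Header* header,
                                  CollectionMap<StringData>* map,
                                  bool eagerly_assign_offsets,
                                  uint32_t offset,
                                  const char* data) {
  StringData* existing = map->GetExistingObject(offset);
  if (existing != nullptr) {
    return existing;  // Already built from this offset; reuse the node.
  }
  // Creates the item in the Header-owned vector, optionally assigns its offset
  // eagerly, and records the offset -> item mapping for later lookups.
  return map->CreateAndAddItem(header->StringDatas(), eagerly_assign_offsets, offset, data);
}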
+ void AddItem(T* object, uint32_t offset) { + auto it = collection_.emplace(offset, object); + CHECK(it.second) << "CollectionMap already has an object with offset " << offset << " " + << " and address " << it.first->second; + } + + DISALLOW_COPY_AND_ASSIGN(CollectionMap); +}; + +class BuilderMaps { + public: + BuilderMaps(Header* header, bool eagerly_assign_offsets) + : header_(header), eagerly_assign_offsets_(eagerly_assign_offsets) { } + + void CreateStringId(const DexFile& dex_file, uint32_t i); + void CreateTypeId(const DexFile& dex_file, uint32_t i); + void CreateProtoId(const DexFile& dex_file, uint32_t i); + void CreateFieldId(const DexFile& dex_file, uint32_t i); + void CreateMethodId(const DexFile& dex_file, uint32_t i); + void CreateClassDef(const DexFile& dex_file, uint32_t i); + void CreateCallSiteId(const DexFile& dex_file, uint32_t i); + void CreateMethodHandleItem(const DexFile& dex_file, uint32_t i); + + void CreateCallSitesAndMethodHandles(const DexFile& dex_file); + + TypeList* CreateTypeList(const DexFile::TypeList* type_list, uint32_t offset); + EncodedArrayItem* CreateEncodedArrayItem(const DexFile& dex_file, + const uint8_t* static_data, + uint32_t offset); + AnnotationItem* CreateAnnotationItem(const DexFile& dex_file, + const DexFile::AnnotationItem* annotation); + AnnotationSetItem* CreateAnnotationSetItem(const DexFile& dex_file, + const DexFile::AnnotationSetItem* disk_annotations_item, uint32_t offset); + AnnotationsDirectoryItem* CreateAnnotationsDirectoryItem(const DexFile& dex_file, + const DexFile::AnnotationsDirectoryItem* disk_annotations_item, uint32_t offset); + CodeItem* DedupeOrCreateCodeItem(const DexFile& dex_file, + const DexFile::CodeItem* disk_code_item, + uint32_t offset, + uint32_t dex_method_index); + ClassData* CreateClassData(const DexFile& dex_file, const uint8_t* encoded_data, uint32_t offset); + + void AddAnnotationsFromMapListSection(const DexFile& dex_file, + uint32_t start_offset, + uint32_t count); + + void CheckAndSetRemainingOffsets(const DexFile& dex_file, const Options& options); + + // Sort the vectors by map order (same order that was used in the input file). + void SortVectorsByMapOrder(); + + private: + bool GetIdsFromByteCode(const CodeItem* code, + std::vector<TypeId*>* type_ids, + std::vector<StringId*>* string_ids, + std::vector<MethodId*>* method_ids, + std::vector<FieldId*>* field_ids); + + bool GetIdFromInstruction(const Instruction* dec_insn, + std::vector<TypeId*>* type_ids, + std::vector<StringId*>* string_ids, + std::vector<MethodId*>* method_ids, + std::vector<FieldId*>* field_ids); + + EncodedValue* ReadEncodedValue(const DexFile& dex_file, const uint8_t** data); + EncodedValue* ReadEncodedValue(const DexFile& dex_file, + const uint8_t** data, + uint8_t type, + uint8_t length); + void ReadEncodedValue(const DexFile& dex_file, + const uint8_t** data, + uint8_t type, + uint8_t length, + EncodedValue* item); + + MethodItem GenerateMethodItem(const DexFile& dex_file, ClassDataItemIterator& cdii); + + ParameterAnnotation* GenerateParameterAnnotation( + const DexFile& dex_file, + MethodId* method_id, + const DexFile::AnnotationSetRefList* annotation_set_ref_list, + uint32_t offset); + + template <typename Type, class... Args> + Type* CreateAndAddIndexedItem(IndexedCollectionVector<Type>& vector, + uint32_t offset, + uint32_t index, + Args&&...
args) { + Type* item = vector.CreateAndAddIndexedItem(index, std::forward<Args>(args)...); + DCHECK(!item->OffsetAssigned()); + if (eagerly_assign_offsets_) { + item->SetOffset(offset); + } + return item; + } + + Header* header_; + // If we eagerly assign offsets during IR building or later after layout. Must be false if + // changing the layout is enabled. + bool eagerly_assign_offsets_; + + // Note: maps do not have ownership. + CollectionMap<StringData> string_datas_map_; + CollectionMap<TypeList> type_lists_map_; + CollectionMap<EncodedArrayItem> encoded_array_items_map_; + CollectionMap<AnnotationItem> annotation_items_map_; + CollectionMap<AnnotationSetItem> annotation_set_items_map_; + CollectionMap<AnnotationSetRefList> annotation_set_ref_lists_map_; + CollectionMap<AnnotationsDirectoryItem> annotations_directory_items_map_; + CollectionMap<DebugInfoItem> debug_info_items_map_; + // Code item maps need to check both the debug info offset and debug info offset, do not use + // CollectionMap. + // First offset is the code item offset, second is the debug info offset. + std::map<std::pair<uint32_t, uint32_t>, CodeItem*> code_items_map_; + CollectionMap<ClassData> class_datas_map_; + + DISALLOW_COPY_AND_ASSIGN(BuilderMaps); +}; Header* DexIrBuilder(const DexFile& dex_file, bool eagerly_assign_offsets, @@ -50,36 +262,35 @@ Header* DexIrBuilder(const DexFile& dex_file, dex_file.NumFieldIds(), dex_file.NumMethodIds(), dex_file.NumClassDefs()); - Collections& collections = header->GetCollections(); - collections.SetEagerlyAssignOffsets(eagerly_assign_offsets); + BuilderMaps builder_maps(header, eagerly_assign_offsets); // Walk the rest of the header fields. // StringId table. - collections.SetStringIdsOffset(disk_header.string_ids_off_); + header->StringIds().SetOffset(disk_header.string_ids_off_); for (uint32_t i = 0; i < dex_file.NumStringIds(); ++i) { - collections.CreateStringId(dex_file, i); + builder_maps.CreateStringId(dex_file, i); } // TypeId table. - collections.SetTypeIdsOffset(disk_header.type_ids_off_); + header->TypeIds().SetOffset(disk_header.type_ids_off_); for (uint32_t i = 0; i < dex_file.NumTypeIds(); ++i) { - collections.CreateTypeId(dex_file, i); + builder_maps.CreateTypeId(dex_file, i); } // ProtoId table. - collections.SetProtoIdsOffset(disk_header.proto_ids_off_); + header->ProtoIds().SetOffset(disk_header.proto_ids_off_); for (uint32_t i = 0; i < dex_file.NumProtoIds(); ++i) { - collections.CreateProtoId(dex_file, i); + builder_maps.CreateProtoId(dex_file, i); } // FieldId table. - collections.SetFieldIdsOffset(disk_header.field_ids_off_); + header->FieldIds().SetOffset(disk_header.field_ids_off_); for (uint32_t i = 0; i < dex_file.NumFieldIds(); ++i) { - collections.CreateFieldId(dex_file, i); + builder_maps.CreateFieldId(dex_file, i); } // MethodId table. - collections.SetMethodIdsOffset(disk_header.method_ids_off_); + header->MethodIds().SetOffset(disk_header.method_ids_off_); for (uint32_t i = 0; i < dex_file.NumMethodIds(); ++i) { - collections.CreateMethodId(dex_file, i); + builder_maps.CreateMethodId(dex_file, i); } // ClassDef table. 
- collections.SetClassDefsOffset(disk_header.class_defs_off_); + header->ClassDefs().SetOffset(disk_header.class_defs_off_); for (uint32_t i = 0; i < dex_file.NumClassDefs(); ++i) { if (!options.class_filter_.empty()) { // If the filter is enabled (not empty), filter out classes that don't have a matching @@ -90,29 +301,29 @@ Header* DexIrBuilder(const DexFile& dex_file, continue; } } - collections.CreateClassDef(dex_file, i); + builder_maps.CreateClassDef(dex_file, i); } // MapItem. - collections.SetMapListOffset(disk_header.map_off_); + header->SetMapListOffset(disk_header.map_off_); // CallSiteIds and MethodHandleItems. - collections.CreateCallSitesAndMethodHandles(dex_file); - CheckAndSetRemainingOffsets(dex_file, &collections, options); + builder_maps.CreateCallSitesAndMethodHandles(dex_file); + builder_maps.CheckAndSetRemainingOffsets(dex_file, options); // Sort the vectors by the map order (same order as the file). - collections.SortVectorsByMapOrder(); - collections.ClearMaps(); + builder_maps.SortVectorsByMapOrder(); // Load the link data if it exists. - collections.SetLinkData(std::vector<uint8_t>( + header->SetLinkData(std::vector<uint8_t>( dex_file.DataBegin() + dex_file.GetHeader().link_off_, dex_file.DataBegin() + dex_file.GetHeader().link_off_ + dex_file.GetHeader().link_size_)); return header; } -static void CheckAndSetRemainingOffsets(const DexFile& dex_file, - Collections* collections, - const Options& options) { +/* + * Get all the types, strings, methods, and fields referred to from bytecode. + */ +void BuilderMaps::CheckAndSetRemainingOffsets(const DexFile& dex_file, const Options& options) { const DexFile::Header& disk_header = dex_file.GetHeader(); // Read MapItems and validate/set remaining offsets. const DexFile::MapList* map = dex_file.GetMapList(); @@ -125,74 +336,74 @@ static void CheckAndSetRemainingOffsets(const DexFile& dex_file, CHECK_EQ(item->offset_, 0u); break; case DexFile::kDexTypeStringIdItem: - CHECK_EQ(item->size_, collections->StringIdsSize()); - CHECK_EQ(item->offset_, collections->StringIdsOffset()); + CHECK_EQ(item->size_, header_->StringIds().Size()); + CHECK_EQ(item->offset_, header_->StringIds().GetOffset()); break; case DexFile::kDexTypeTypeIdItem: - CHECK_EQ(item->size_, collections->TypeIdsSize()); - CHECK_EQ(item->offset_, collections->TypeIdsOffset()); + CHECK_EQ(item->size_, header_->TypeIds().Size()); + CHECK_EQ(item->offset_, header_->TypeIds().GetOffset()); break; case DexFile::kDexTypeProtoIdItem: - CHECK_EQ(item->size_, collections->ProtoIdsSize()); - CHECK_EQ(item->offset_, collections->ProtoIdsOffset()); + CHECK_EQ(item->size_, header_->ProtoIds().Size()); + CHECK_EQ(item->offset_, header_->ProtoIds().GetOffset()); break; case DexFile::kDexTypeFieldIdItem: - CHECK_EQ(item->size_, collections->FieldIdsSize()); - CHECK_EQ(item->offset_, collections->FieldIdsOffset()); + CHECK_EQ(item->size_, header_->FieldIds().Size()); + CHECK_EQ(item->offset_, header_->FieldIds().GetOffset()); break; case DexFile::kDexTypeMethodIdItem: - CHECK_EQ(item->size_, collections->MethodIdsSize()); - CHECK_EQ(item->offset_, collections->MethodIdsOffset()); + CHECK_EQ(item->size_, header_->MethodIds().Size()); + CHECK_EQ(item->offset_, header_->MethodIds().GetOffset()); break; case DexFile::kDexTypeClassDefItem: if (options.class_filter_.empty()) { // The filter may have removed some classes, this will get fixed up during writing. 
- CHECK_EQ(item->size_, collections->ClassDefsSize()); + CHECK_EQ(item->size_, header_->ClassDefs().Size()); } - CHECK_EQ(item->offset_, collections->ClassDefsOffset()); + CHECK_EQ(item->offset_, header_->ClassDefs().GetOffset()); break; case DexFile::kDexTypeCallSiteIdItem: - CHECK_EQ(item->size_, collections->CallSiteIdsSize()); - CHECK_EQ(item->offset_, collections->CallSiteIdsOffset()); + CHECK_EQ(item->size_, header_->CallSiteIds().Size()); + CHECK_EQ(item->offset_, header_->CallSiteIds().GetOffset()); break; case DexFile::kDexTypeMethodHandleItem: - CHECK_EQ(item->size_, collections->MethodHandleItemsSize()); - CHECK_EQ(item->offset_, collections->MethodHandleItemsOffset()); + CHECK_EQ(item->size_, header_->MethodHandleItems().Size()); + CHECK_EQ(item->offset_, header_->MethodHandleItems().GetOffset()); break; case DexFile::kDexTypeMapList: CHECK_EQ(item->size_, 1u); CHECK_EQ(item->offset_, disk_header.map_off_); break; case DexFile::kDexTypeTypeList: - collections->SetTypeListsOffset(item->offset_); + header_->TypeLists().SetOffset(item->offset_); break; case DexFile::kDexTypeAnnotationSetRefList: - collections->SetAnnotationSetRefListsOffset(item->offset_); + header_->AnnotationSetRefLists().SetOffset(item->offset_); break; case DexFile::kDexTypeAnnotationSetItem: - collections->SetAnnotationSetItemsOffset(item->offset_); + header_->AnnotationSetItems().SetOffset(item->offset_); break; case DexFile::kDexTypeClassDataItem: - collections->SetClassDatasOffset(item->offset_); + header_->ClassDatas().SetOffset(item->offset_); break; case DexFile::kDexTypeCodeItem: - collections->SetCodeItemsOffset(item->offset_); + header_->CodeItems().SetOffset(item->offset_); break; case DexFile::kDexTypeStringDataItem: - collections->SetStringDatasOffset(item->offset_); + header_->StringDatas().SetOffset(item->offset_); break; case DexFile::kDexTypeDebugInfoItem: - collections->SetDebugInfoItemsOffset(item->offset_); + header_->DebugInfoItems().SetOffset(item->offset_); break; case DexFile::kDexTypeAnnotationItem: - collections->SetAnnotationItemsOffset(item->offset_); - collections->AddAnnotationsFromMapListSection(dex_file, item->offset_, item->size_); + header_->AnnotationItems().SetOffset(item->offset_); + AddAnnotationsFromMapListSection(dex_file, item->offset_, item->size_); break; case DexFile::kDexTypeEncodedArrayItem: - collections->SetEncodedArrayItemsOffset(item->offset_); + header_->EncodedArrayItems().SetOffset(item->offset_); break; case DexFile::kDexTypeAnnotationsDirectoryItem: - collections->SetAnnotationsDirectoryItemsOffset(item->offset_); + header_->AnnotationsDirectoryItems().SetOffset(item->offset_); break; default: LOG(ERROR) << "Unknown map list item type."; @@ -200,5 +411,798 @@ static void CheckAndSetRemainingOffsets(const DexFile& dex_file, } } +void BuilderMaps::CreateStringId(const DexFile& dex_file, uint32_t i) { + const DexFile::StringId& disk_string_id = dex_file.GetStringId(dex::StringIndex(i)); + StringData* string_data = + string_datas_map_.CreateAndAddItem(header_->StringDatas(), + eagerly_assign_offsets_, + disk_string_id.string_data_off_, + dex_file.GetStringData(disk_string_id)); + CreateAndAddIndexedItem(header_->StringIds(), + header_->StringIds().GetOffset() + i * StringId::ItemSize(), + i, + string_data); +} + +void BuilderMaps::CreateTypeId(const DexFile& dex_file, uint32_t i) { + const DexFile::TypeId& disk_type_id = dex_file.GetTypeId(dex::TypeIndex(i)); + CreateAndAddIndexedItem(header_->TypeIds(), + header_->TypeIds().GetOffset() + i * 
TypeId::ItemSize(), + i, + header_->StringIds()[disk_type_id.descriptor_idx_.index_]); +} + +void BuilderMaps::CreateProtoId(const DexFile& dex_file, uint32_t i) { + const DexFile::ProtoId& disk_proto_id = dex_file.GetProtoId(dex::ProtoIndex(i)); + const DexFile::TypeList* type_list = dex_file.GetProtoParameters(disk_proto_id); + TypeList* parameter_type_list = CreateTypeList(type_list, disk_proto_id.parameters_off_); + + CreateAndAddIndexedItem(header_->ProtoIds(), + header_->ProtoIds().GetOffset() + i * ProtoId::ItemSize(), + i, + header_->StringIds()[disk_proto_id.shorty_idx_.index_], + header_->TypeIds()[disk_proto_id.return_type_idx_.index_], + parameter_type_list); +} + +void BuilderMaps::CreateFieldId(const DexFile& dex_file, uint32_t i) { + const DexFile::FieldId& disk_field_id = dex_file.GetFieldId(i); + CreateAndAddIndexedItem(header_->FieldIds(), + header_->FieldIds().GetOffset() + i * FieldId::ItemSize(), + i, + header_->TypeIds()[disk_field_id.class_idx_.index_], + header_->TypeIds()[disk_field_id.type_idx_.index_], + header_->StringIds()[disk_field_id.name_idx_.index_]); +} + +void BuilderMaps::CreateMethodId(const DexFile& dex_file, uint32_t i) { + const DexFile::MethodId& disk_method_id = dex_file.GetMethodId(i); + CreateAndAddIndexedItem(header_->MethodIds(), + header_->MethodIds().GetOffset() + i * MethodId::ItemSize(), + i, + header_->TypeIds()[disk_method_id.class_idx_.index_], + header_->ProtoIds()[disk_method_id.proto_idx_.index_], + header_->StringIds()[disk_method_id.name_idx_.index_]); +} + +void BuilderMaps::CreateClassDef(const DexFile& dex_file, uint32_t i) { + const DexFile::ClassDef& disk_class_def = dex_file.GetClassDef(i); + const TypeId* class_type = header_->TypeIds()[disk_class_def.class_idx_.index_]; + uint32_t access_flags = disk_class_def.access_flags_; + const TypeId* superclass = header_->GetTypeIdOrNullPtr(disk_class_def.superclass_idx_.index_); + + const DexFile::TypeList* type_list = dex_file.GetInterfacesList(disk_class_def); + TypeList* interfaces_type_list = CreateTypeList(type_list, disk_class_def.interfaces_off_); + + const StringId* source_file = + header_->GetStringIdOrNullPtr(disk_class_def.source_file_idx_.index_); + // Annotations. + AnnotationsDirectoryItem* annotations = nullptr; + const DexFile::AnnotationsDirectoryItem* disk_annotations_directory_item = + dex_file.GetAnnotationsDirectory(disk_class_def); + if (disk_annotations_directory_item != nullptr) { + annotations = CreateAnnotationsDirectoryItem( + dex_file, disk_annotations_directory_item, disk_class_def.annotations_off_); + } + // Static field initializers. 
+ const uint8_t* static_data = dex_file.GetEncodedStaticFieldValuesArray(disk_class_def); + EncodedArrayItem* static_values = + CreateEncodedArrayItem(dex_file, static_data, disk_class_def.static_values_off_); + ClassData* class_data = CreateClassData( + dex_file, dex_file.GetClassData(disk_class_def), disk_class_def.class_data_off_); + CreateAndAddIndexedItem(header_->ClassDefs(), + header_->ClassDefs().GetOffset() + i * ClassDef::ItemSize(), + i, + class_type, + access_flags, + superclass, + interfaces_type_list, + source_file, + annotations, + static_values, + class_data); +} + +void BuilderMaps::CreateCallSiteId(const DexFile& dex_file, uint32_t i) { + const DexFile::CallSiteIdItem& disk_call_site_id = dex_file.GetCallSiteId(i); + const uint8_t* disk_call_item_ptr = dex_file.DataBegin() + disk_call_site_id.data_off_; + EncodedArrayItem* call_site_item = + CreateEncodedArrayItem(dex_file, disk_call_item_ptr, disk_call_site_id.data_off_); + + CreateAndAddIndexedItem(header_->CallSiteIds(), + header_->CallSiteIds().GetOffset() + i * CallSiteId::ItemSize(), + i, + call_site_item); +} + +void BuilderMaps::CreateMethodHandleItem(const DexFile& dex_file, uint32_t i) { + const DexFile::MethodHandleItem& disk_method_handle = dex_file.GetMethodHandle(i); + uint16_t index = disk_method_handle.field_or_method_idx_; + DexFile::MethodHandleType type = + static_cast<DexFile::MethodHandleType>(disk_method_handle.method_handle_type_); + bool is_invoke = type == DexFile::MethodHandleType::kInvokeStatic || + type == DexFile::MethodHandleType::kInvokeInstance || + type == DexFile::MethodHandleType::kInvokeConstructor || + type == DexFile::MethodHandleType::kInvokeDirect || + type == DexFile::MethodHandleType::kInvokeInterface; + static_assert(DexFile::MethodHandleType::kLast == DexFile::MethodHandleType::kInvokeInterface, + "Unexpected method handle types."); + IndexedItem* field_or_method_id; + if (is_invoke) { + field_or_method_id = header_->MethodIds()[index]; + } else { + field_or_method_id = header_->FieldIds()[index]; + } + CreateAndAddIndexedItem(header_->MethodHandleItems(), + header_->MethodHandleItems().GetOffset() + + i * MethodHandleItem::ItemSize(), + i, + type, + field_or_method_id); +} + +void BuilderMaps::CreateCallSitesAndMethodHandles(const DexFile& dex_file) { + // Iterate through the map list and set the offset of the CallSiteIds and MethodHandleItems. + const DexFile::MapList* map = dex_file.GetMapList(); + for (uint32_t i = 0; i < map->size_; ++i) { + const DexFile::MapItem* item = map->list_ + i; + switch (item->type_) { + case DexFile::kDexTypeCallSiteIdItem: + header_->CallSiteIds().SetOffset(item->offset_); + break; + case DexFile::kDexTypeMethodHandleItem: + header_->MethodHandleItems().SetOffset(item->offset_); + break; + default: + break; + } + } + // Populate MethodHandleItems first (CallSiteIds may depend on them). + for (uint32_t i = 0; i < dex_file.NumMethodHandles(); i++) { + CreateMethodHandleItem(dex_file, i); + } + // Populate CallSiteIds. 
+ for (uint32_t i = 0; i < dex_file.NumCallSiteIds(); i++) { + CreateCallSiteId(dex_file, i); + } +} + +TypeList* BuilderMaps::CreateTypeList(const DexFile::TypeList* dex_type_list, uint32_t offset) { + if (dex_type_list == nullptr) { + return nullptr; + } + TypeList* type_list = type_lists_map_.GetExistingObject(offset); + if (type_list == nullptr) { + TypeIdVector* type_vector = new TypeIdVector(); + uint32_t size = dex_type_list->Size(); + for (uint32_t index = 0; index < size; ++index) { + type_vector->push_back(header_->TypeIds()[ + dex_type_list->GetTypeItem(index).type_idx_.index_]); + } + type_list = type_lists_map_.CreateAndAddItem(header_->TypeLists(), + eagerly_assign_offsets_, + offset, + type_vector); + } + return type_list; +} + +EncodedArrayItem* BuilderMaps::CreateEncodedArrayItem(const DexFile& dex_file, + const uint8_t* static_data, + uint32_t offset) { + if (static_data == nullptr) { + return nullptr; + } + EncodedArrayItem* encoded_array_item = encoded_array_items_map_.GetExistingObject(offset); + if (encoded_array_item == nullptr) { + uint32_t size = DecodeUnsignedLeb128(&static_data); + EncodedValueVector* values = new EncodedValueVector(); + for (uint32_t i = 0; i < size; ++i) { + values->push_back(std::unique_ptr<EncodedValue>(ReadEncodedValue(dex_file, &static_data))); + } + // TODO: Calculate the size of the encoded array. + encoded_array_item = encoded_array_items_map_.CreateAndAddItem(header_->EncodedArrayItems(), + eagerly_assign_offsets_, + offset, + values); + } + return encoded_array_item; +} + +void BuilderMaps::AddAnnotationsFromMapListSection(const DexFile& dex_file, + uint32_t start_offset, + uint32_t count) { + uint32_t current_offset = start_offset; + for (size_t i = 0; i < count; ++i) { + // Annotation that we didn't process already, add it to the set. 
+ const DexFile::AnnotationItem* annotation = dex_file.GetAnnotationItemAtOffset(current_offset); + AnnotationItem* annotation_item = CreateAnnotationItem(dex_file, annotation); + DCHECK(annotation_item != nullptr); + current_offset += annotation_item->GetSize(); + } +} + +AnnotationItem* BuilderMaps::CreateAnnotationItem(const DexFile& dex_file, + const DexFile::AnnotationItem* annotation) { + const uint8_t* const start_data = reinterpret_cast<const uint8_t*>(annotation); + const uint32_t offset = start_data - dex_file.DataBegin(); + AnnotationItem* annotation_item = annotation_items_map_.GetExistingObject(offset); + if (annotation_item == nullptr) { + uint8_t visibility = annotation->visibility_; + const uint8_t* annotation_data = annotation->annotation_; + std::unique_ptr<EncodedValue> encoded_value( + ReadEncodedValue(dex_file, &annotation_data, DexFile::kDexAnnotationAnnotation, 0)); + annotation_item = + annotation_items_map_.CreateAndAddItem(header_->AnnotationItems(), + eagerly_assign_offsets_, + offset, + visibility, + encoded_value->ReleaseEncodedAnnotation()); + annotation_item->SetSize(annotation_data - start_data); + } + return annotation_item; +} + + +AnnotationSetItem* BuilderMaps::CreateAnnotationSetItem(const DexFile& dex_file, + const DexFile::AnnotationSetItem* disk_annotations_item, uint32_t offset) { + if (disk_annotations_item == nullptr || (disk_annotations_item->size_ == 0 && offset == 0)) { + return nullptr; + } + AnnotationSetItem* annotation_set_item = annotation_set_items_map_.GetExistingObject(offset); + if (annotation_set_item == nullptr) { + std::vector<AnnotationItem*>* items = new std::vector<AnnotationItem*>(); + for (uint32_t i = 0; i < disk_annotations_item->size_; ++i) { + const DexFile::AnnotationItem* annotation = + dex_file.GetAnnotationItem(disk_annotations_item, i); + if (annotation == nullptr) { + continue; + } + AnnotationItem* annotation_item = CreateAnnotationItem(dex_file, annotation); + items->push_back(annotation_item); + } + annotation_set_item = + annotation_set_items_map_.CreateAndAddItem(header_->AnnotationSetItems(), + eagerly_assign_offsets_, + offset, + items); + } + return annotation_set_item; +} + +AnnotationsDirectoryItem* BuilderMaps::CreateAnnotationsDirectoryItem(const DexFile& dex_file, + const DexFile::AnnotationsDirectoryItem* disk_annotations_item, uint32_t offset) { + AnnotationsDirectoryItem* annotations_directory_item = + annotations_directory_items_map_.GetExistingObject(offset); + if (annotations_directory_item != nullptr) { + return annotations_directory_item; + } + const DexFile::AnnotationSetItem* class_set_item = + dex_file.GetClassAnnotationSet(disk_annotations_item); + AnnotationSetItem* class_annotation = nullptr; + if (class_set_item != nullptr) { + uint32_t item_offset = disk_annotations_item->class_annotations_off_; + class_annotation = CreateAnnotationSetItem(dex_file, class_set_item, item_offset); + } + const DexFile::FieldAnnotationsItem* fields = + dex_file.GetFieldAnnotations(disk_annotations_item); + FieldAnnotationVector* field_annotations = nullptr; + if (fields != nullptr) { + field_annotations = new FieldAnnotationVector(); + for (uint32_t i = 0; i < disk_annotations_item->fields_size_; ++i) { + FieldId* field_id = header_->FieldIds()[fields[i].field_idx_]; + const DexFile::AnnotationSetItem* field_set_item = + dex_file.GetFieldAnnotationSetItem(fields[i]); + uint32_t annotation_set_offset = fields[i].annotations_off_; + AnnotationSetItem* annotation_set_item = + CreateAnnotationSetItem(dex_file, 
field_set_item, annotation_set_offset); + field_annotations->push_back(std::unique_ptr<FieldAnnotation>( + new FieldAnnotation(field_id, annotation_set_item))); + } + } + const DexFile::MethodAnnotationsItem* methods = + dex_file.GetMethodAnnotations(disk_annotations_item); + MethodAnnotationVector* method_annotations = nullptr; + if (methods != nullptr) { + method_annotations = new MethodAnnotationVector(); + for (uint32_t i = 0; i < disk_annotations_item->methods_size_; ++i) { + MethodId* method_id = header_->MethodIds()[methods[i].method_idx_]; + const DexFile::AnnotationSetItem* method_set_item = + dex_file.GetMethodAnnotationSetItem(methods[i]); + uint32_t annotation_set_offset = methods[i].annotations_off_; + AnnotationSetItem* annotation_set_item = + CreateAnnotationSetItem(dex_file, method_set_item, annotation_set_offset); + method_annotations->push_back(std::unique_ptr<MethodAnnotation>( + new MethodAnnotation(method_id, annotation_set_item))); + } + } + const DexFile::ParameterAnnotationsItem* parameters = + dex_file.GetParameterAnnotations(disk_annotations_item); + ParameterAnnotationVector* parameter_annotations = nullptr; + if (parameters != nullptr) { + parameter_annotations = new ParameterAnnotationVector(); + for (uint32_t i = 0; i < disk_annotations_item->parameters_size_; ++i) { + MethodId* method_id = header_->MethodIds()[parameters[i].method_idx_]; + const DexFile::AnnotationSetRefList* list = + dex_file.GetParameterAnnotationSetRefList(¶meters[i]); + parameter_annotations->push_back(std::unique_ptr<ParameterAnnotation>( + GenerateParameterAnnotation(dex_file, method_id, list, parameters[i].annotations_off_))); + } + } + // TODO: Calculate the size of the annotations directory. + return annotations_directory_items_map_.CreateAndAddItem(header_->AnnotationsDirectoryItems(), + eagerly_assign_offsets_, + offset, + class_annotation, + field_annotations, + method_annotations, + parameter_annotations); +} + +CodeItem* BuilderMaps::DedupeOrCreateCodeItem(const DexFile& dex_file, + const DexFile::CodeItem* disk_code_item, + uint32_t offset, + uint32_t dex_method_index) { + if (disk_code_item == nullptr) { + return nullptr; + } + CodeItemDebugInfoAccessor accessor(dex_file, disk_code_item, dex_method_index); + const uint32_t debug_info_offset = accessor.DebugInfoOffset(); + + // Create the offsets pair and dedupe based on it. 
+ std::pair<uint32_t, uint32_t> offsets_pair(offset, debug_info_offset); + auto existing = code_items_map_.find(offsets_pair); + if (existing != code_items_map_.end()) { + return existing->second; + } + + const uint8_t* debug_info_stream = dex_file.GetDebugInfoStream(debug_info_offset); + DebugInfoItem* debug_info = nullptr; + if (debug_info_stream != nullptr) { + debug_info = debug_info_items_map_.GetExistingObject(debug_info_offset); + if (debug_info == nullptr) { + uint32_t debug_info_size = GetDebugInfoStreamSize(debug_info_stream); + uint8_t* debug_info_buffer = new uint8_t[debug_info_size]; + memcpy(debug_info_buffer, debug_info_stream, debug_info_size); + debug_info = debug_info_items_map_.CreateAndAddItem(header_->DebugInfoItems(), + eagerly_assign_offsets_, + debug_info_offset, + debug_info_size, + debug_info_buffer); + } + } + + uint32_t insns_size = accessor.InsnsSizeInCodeUnits(); + uint16_t* insns = new uint16_t[insns_size]; + memcpy(insns, accessor.Insns(), insns_size * sizeof(uint16_t)); + + TryItemVector* tries = nullptr; + CatchHandlerVector* handler_list = nullptr; + if (accessor.TriesSize() > 0) { + tries = new TryItemVector(); + handler_list = new CatchHandlerVector(); + for (const DexFile::TryItem& disk_try_item : accessor.TryItems()) { + uint32_t start_addr = disk_try_item.start_addr_; + uint16_t insn_count = disk_try_item.insn_count_; + uint16_t handler_off = disk_try_item.handler_off_; + const CatchHandler* handlers = nullptr; + for (std::unique_ptr<const CatchHandler>& existing_handlers : *handler_list) { + if (handler_off == existing_handlers->GetListOffset()) { + handlers = existing_handlers.get(); + break; + } + } + if (handlers == nullptr) { + bool catch_all = false; + TypeAddrPairVector* addr_pairs = new TypeAddrPairVector(); + for (CatchHandlerIterator it(accessor, disk_try_item); it.HasNext(); it.Next()) { + const dex::TypeIndex type_index = it.GetHandlerTypeIndex(); + const TypeId* type_id = header_->GetTypeIdOrNullPtr(type_index.index_); + catch_all |= type_id == nullptr; + addr_pairs->push_back(std::unique_ptr<const TypeAddrPair>( + new TypeAddrPair(type_id, it.GetHandlerAddress()))); + } + handlers = new CatchHandler(catch_all, handler_off, addr_pairs); + handler_list->push_back(std::unique_ptr<const CatchHandler>(handlers)); + } + TryItem* try_item = new TryItem(start_addr, insn_count, handlers); + tries->push_back(std::unique_ptr<const TryItem>(try_item)); + } + // Manually walk catch handlers list and add any missing handlers unreferenced by try items. 
+ const uint8_t* handlers_base = accessor.GetCatchHandlerData(); + const uint8_t* handlers_data = handlers_base; + uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_data); + while (handlers_size > handler_list->size()) { + bool already_added = false; + uint16_t handler_off = handlers_data - handlers_base; + for (std::unique_ptr<const CatchHandler>& existing_handlers : *handler_list) { + if (handler_off == existing_handlers->GetListOffset()) { + already_added = true; + break; + } + } + int32_t size = DecodeSignedLeb128(&handlers_data); + bool has_catch_all = size <= 0; + if (has_catch_all) { + size = -size; + } + if (already_added) { + for (int32_t i = 0; i < size; i++) { + DecodeUnsignedLeb128(&handlers_data); + DecodeUnsignedLeb128(&handlers_data); + } + if (has_catch_all) { + DecodeUnsignedLeb128(&handlers_data); + } + continue; + } + TypeAddrPairVector* addr_pairs = new TypeAddrPairVector(); + for (int32_t i = 0; i < size; i++) { + const TypeId* type_id = + header_->GetTypeIdOrNullPtr(DecodeUnsignedLeb128(&handlers_data)); + uint32_t addr = DecodeUnsignedLeb128(&handlers_data); + addr_pairs->push_back( + std::unique_ptr<const TypeAddrPair>(new TypeAddrPair(type_id, addr))); + } + if (has_catch_all) { + uint32_t addr = DecodeUnsignedLeb128(&handlers_data); + addr_pairs->push_back( + std::unique_ptr<const TypeAddrPair>(new TypeAddrPair(nullptr, addr))); + } + const CatchHandler* handler = new CatchHandler(has_catch_all, handler_off, addr_pairs); + handler_list->push_back(std::unique_ptr<const CatchHandler>(handler)); + } + } + + uint32_t size = dex_file.GetCodeItemSize(*disk_code_item); + CodeItem* code_item = header_->CodeItems().CreateAndAddItem(accessor.RegistersSize(), + accessor.InsSize(), + accessor.OutsSize(), + debug_info, + insns_size, + insns, + tries, + handler_list); + code_item->SetSize(size); + + // Add the code item to the map. + DCHECK(!code_item->OffsetAssigned()); + if (eagerly_assign_offsets_) { + code_item->SetOffset(offset); + } + code_items_map_.emplace(offsets_pair, code_item); + + // Add "fixup" references to types, strings, methods, and fields. + // This is temporary, as we will probably want more detailed parsing of the + // instructions here. + std::vector<TypeId*> type_ids; + std::vector<StringId*> string_ids; + std::vector<MethodId*> method_ids; + std::vector<FieldId*> field_ids; + if (GetIdsFromByteCode(code_item, + /*out*/ &type_ids, + /*out*/ &string_ids, + /*out*/ &method_ids, + /*out*/ &field_ids)) { + CodeFixups* fixups = new CodeFixups(std::move(type_ids), + std::move(string_ids), + std::move(method_ids), + std::move(field_ids)); + code_item->SetCodeFixups(fixups); + } + + return code_item; +} + +ClassData* BuilderMaps::CreateClassData( + const DexFile& dex_file, const uint8_t* encoded_data, uint32_t offset) { + // Read the fields and methods defined by the class, resolving the circular reference from those + // to classes by setting class at the same time. + ClassData* class_data = class_datas_map_.GetExistingObject(offset); + if (class_data == nullptr && encoded_data != nullptr) { + ClassDataItemIterator cdii(dex_file, encoded_data); + // Static fields. + FieldItemVector* static_fields = new FieldItemVector(); + for (; cdii.HasNextStaticField(); cdii.Next()) { + FieldId* field_item = header_->FieldIds()[cdii.GetMemberIndex()]; + uint32_t access_flags = cdii.GetRawMemberAccessFlags(); + static_fields->emplace_back(access_flags, field_item); + } + // Instance fields. 
+ FieldItemVector* instance_fields = new FieldItemVector(); + for (; cdii.HasNextInstanceField(); cdii.Next()) { + FieldId* field_item = header_->FieldIds()[cdii.GetMemberIndex()]; + uint32_t access_flags = cdii.GetRawMemberAccessFlags(); + instance_fields->emplace_back(access_flags, field_item); + } + // Direct methods. + MethodItemVector* direct_methods = new MethodItemVector(); + for (; cdii.HasNextDirectMethod(); cdii.Next()) { + direct_methods->push_back(GenerateMethodItem(dex_file, cdii)); + } + // Virtual methods. + MethodItemVector* virtual_methods = new MethodItemVector(); + for (; cdii.HasNextVirtualMethod(); cdii.Next()) { + virtual_methods->push_back(GenerateMethodItem(dex_file, cdii)); + } + class_data = class_datas_map_.CreateAndAddItem(header_->ClassDatas(), + eagerly_assign_offsets_, + offset, + static_fields, + instance_fields, + direct_methods, + virtual_methods); + class_data->SetSize(cdii.EndDataPointer() - encoded_data); + } + return class_data; +} + +void BuilderMaps::SortVectorsByMapOrder() { + header_->StringDatas().SortByMapOrder(string_datas_map_.Collection()); + header_->TypeLists().SortByMapOrder(type_lists_map_.Collection()); + header_->EncodedArrayItems().SortByMapOrder(encoded_array_items_map_.Collection()); + header_->AnnotationItems().SortByMapOrder(annotation_items_map_.Collection()); + header_->AnnotationSetItems().SortByMapOrder(annotation_set_items_map_.Collection()); + header_->AnnotationSetRefLists().SortByMapOrder(annotation_set_ref_lists_map_.Collection()); + header_->AnnotationsDirectoryItems().SortByMapOrder( + annotations_directory_items_map_.Collection()); + header_->DebugInfoItems().SortByMapOrder(debug_info_items_map_.Collection()); + header_->CodeItems().SortByMapOrder(code_items_map_); + header_->ClassDatas().SortByMapOrder(class_datas_map_.Collection()); +} + +bool BuilderMaps::GetIdsFromByteCode(const CodeItem* code, + std::vector<TypeId*>* type_ids, + std::vector<StringId*>* string_ids, + std::vector<MethodId*>* method_ids, + std::vector<FieldId*>* field_ids) { + bool has_id = false; + IterationRange<DexInstructionIterator> instructions = code->Instructions(); + SafeDexInstructionIterator it(instructions.begin(), instructions.end()); + for (; !it.IsErrorState() && it < instructions.end(); ++it) { + // In case the instruction goes past the end of the code item, make sure to not process it. + SafeDexInstructionIterator next = it; + ++next; + if (next.IsErrorState()) { + break; + } + has_id |= GetIdFromInstruction(&it.Inst(), type_ids, string_ids, method_ids, field_ids); + } // for + return has_id; +} + +bool BuilderMaps::GetIdFromInstruction(const Instruction* dec_insn, + std::vector<TypeId*>* type_ids, + std::vector<StringId*>* string_ids, + std::vector<MethodId*>* method_ids, + std::vector<FieldId*>* field_ids) { + // Determine index and width of the string. + uint32_t index = 0; + switch (Instruction::FormatOf(dec_insn->Opcode())) { + // SOME NOT SUPPORTED: + // case Instruction::k20bc: + case Instruction::k21c: + case Instruction::k35c: + // case Instruction::k35ms: + case Instruction::k3rc: + // case Instruction::k3rms: + // case Instruction::k35mi: + // case Instruction::k3rmi: + case Instruction::k45cc: + case Instruction::k4rcc: + index = dec_insn->VRegB(); + break; + case Instruction::k31c: + index = dec_insn->VRegB(); + break; + case Instruction::k22c: + // case Instruction::k22cs: + index = dec_insn->VRegC(); + break; + default: + break; + } // switch + + // Determine index type, and add reference to the appropriate collection. 
+ switch (Instruction::IndexTypeOf(dec_insn->Opcode())) { + case Instruction::kIndexTypeRef: + if (index < header_->TypeIds().Size()) { + type_ids->push_back(header_->TypeIds()[index]); + return true; + } + break; + case Instruction::kIndexStringRef: + if (index < header_->StringIds().Size()) { + string_ids->push_back(header_->StringIds()[index]); + return true; + } + break; + case Instruction::kIndexMethodRef: + case Instruction::kIndexMethodAndProtoRef: + if (index < header_->MethodIds().Size()) { + method_ids->push_back(header_->MethodIds()[index]); + return true; + } + break; + case Instruction::kIndexFieldRef: + if (index < header_->FieldIds().Size()) { + field_ids->push_back(header_->FieldIds()[index]); + return true; + } + break; + case Instruction::kIndexUnknown: + case Instruction::kIndexNone: + case Instruction::kIndexVtableOffset: + case Instruction::kIndexFieldOffset: + default: + break; + } // switch + return false; +} + +EncodedValue* BuilderMaps::ReadEncodedValue(const DexFile& dex_file, const uint8_t** data) { + const uint8_t encoded_value = *(*data)++; + const uint8_t type = encoded_value & 0x1f; + EncodedValue* item = new EncodedValue(type); + ReadEncodedValue(dex_file, data, type, encoded_value >> 5, item); + return item; +} + +EncodedValue* BuilderMaps::ReadEncodedValue(const DexFile& dex_file, + const uint8_t** data, + uint8_t type, + uint8_t length) { + EncodedValue* item = new EncodedValue(type); + ReadEncodedValue(dex_file, data, type, length, item); + return item; +} + +void BuilderMaps::ReadEncodedValue(const DexFile& dex_file, + const uint8_t** data, + uint8_t type, + uint8_t length, + EncodedValue* item) { + switch (type) { + case DexFile::kDexAnnotationByte: + item->SetByte(static_cast<int8_t>(ReadVarWidth(data, length, false))); + break; + case DexFile::kDexAnnotationShort: + item->SetShort(static_cast<int16_t>(ReadVarWidth(data, length, true))); + break; + case DexFile::kDexAnnotationChar: + item->SetChar(static_cast<uint16_t>(ReadVarWidth(data, length, false))); + break; + case DexFile::kDexAnnotationInt: + item->SetInt(static_cast<int32_t>(ReadVarWidth(data, length, true))); + break; + case DexFile::kDexAnnotationLong: + item->SetLong(static_cast<int64_t>(ReadVarWidth(data, length, true))); + break; + case DexFile::kDexAnnotationFloat: { + // Fill on right. + union { + float f; + uint32_t data; + } conv; + conv.data = static_cast<uint32_t>(ReadVarWidth(data, length, false)) << (3 - length) * 8; + item->SetFloat(conv.f); + break; + } + case DexFile::kDexAnnotationDouble: { + // Fill on right. 
+ union { + double d; + uint64_t data; + } conv; + conv.data = ReadVarWidth(data, length, false) << (7 - length) * 8; + item->SetDouble(conv.d); + break; + } + case DexFile::kDexAnnotationMethodType: { + const uint32_t proto_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); + item->SetProtoId(header_->ProtoIds()[proto_index]); + break; + } + case DexFile::kDexAnnotationMethodHandle: { + const uint32_t method_handle_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); + item->SetMethodHandle(header_->MethodHandleItems()[method_handle_index]); + break; + } + case DexFile::kDexAnnotationString: { + const uint32_t string_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); + item->SetStringId(header_->StringIds()[string_index]); + break; + } + case DexFile::kDexAnnotationType: { + const uint32_t string_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); + item->SetTypeId(header_->TypeIds()[string_index]); + break; + } + case DexFile::kDexAnnotationField: + case DexFile::kDexAnnotationEnum: { + const uint32_t field_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); + item->SetFieldId(header_->FieldIds()[field_index]); + break; + } + case DexFile::kDexAnnotationMethod: { + const uint32_t method_index = static_cast<uint32_t>(ReadVarWidth(data, length, false)); + item->SetMethodId(header_->MethodIds()[method_index]); + break; + } + case DexFile::kDexAnnotationArray: { + EncodedValueVector* values = new EncodedValueVector(); + const uint32_t offset = *data - dex_file.DataBegin(); + const uint32_t size = DecodeUnsignedLeb128(data); + // Decode all elements. + for (uint32_t i = 0; i < size; i++) { + values->push_back(std::unique_ptr<EncodedValue>(ReadEncodedValue(dex_file, data))); + } + EncodedArrayItem* array_item = new EncodedArrayItem(values); + if (eagerly_assign_offsets_) { + array_item->SetOffset(offset); + } + item->SetEncodedArray(array_item); + break; + } + case DexFile::kDexAnnotationAnnotation: { + AnnotationElementVector* elements = new AnnotationElementVector(); + const uint32_t type_idx = DecodeUnsignedLeb128(data); + const uint32_t size = DecodeUnsignedLeb128(data); + // Decode all name=value pairs. + for (uint32_t i = 0; i < size; i++) { + const uint32_t name_index = DecodeUnsignedLeb128(data); + elements->push_back(std::unique_ptr<AnnotationElement>( + new AnnotationElement(header_->StringIds()[name_index], + ReadEncodedValue(dex_file, data)))); + } + item->SetEncodedAnnotation(new EncodedAnnotation(header_->TypeIds()[type_idx], elements)); + break; + } + case DexFile::kDexAnnotationNull: + break; + case DexFile::kDexAnnotationBoolean: + item->SetBoolean(length != 0); + break; + default: + break; + } +} + +MethodItem BuilderMaps::GenerateMethodItem(const DexFile& dex_file, ClassDataItemIterator& cdii) { + MethodId* method_id = header_->MethodIds()[cdii.GetMemberIndex()]; + uint32_t access_flags = cdii.GetRawMemberAccessFlags(); + const DexFile::CodeItem* disk_code_item = cdii.GetMethodCodeItem(); + // Temporary hack to prevent incorrectly deduping code items if they have the same offset since + // they may have different debug info streams. 
+ CodeItem* code_item = DedupeOrCreateCodeItem(dex_file, + disk_code_item, + cdii.GetMethodCodeItemOffset(), + cdii.GetMemberIndex()); + return MethodItem(access_flags, method_id, code_item); +} + +ParameterAnnotation* BuilderMaps::GenerateParameterAnnotation( + const DexFile& dex_file, + MethodId* method_id, + const DexFile::AnnotationSetRefList* annotation_set_ref_list, + uint32_t offset) { + AnnotationSetRefList* set_ref_list = annotation_set_ref_lists_map_.GetExistingObject(offset); + if (set_ref_list == nullptr) { + std::vector<AnnotationSetItem*>* annotations = new std::vector<AnnotationSetItem*>(); + for (uint32_t i = 0; i < annotation_set_ref_list->size_; ++i) { + const DexFile::AnnotationSetItem* annotation_set_item = + dex_file.GetSetRefItemItem(&annotation_set_ref_list->list_[i]); + uint32_t set_offset = annotation_set_ref_list->list_[i].annotations_off_; + annotations->push_back(CreateAnnotationSetItem(dex_file, annotation_set_item, set_offset)); + } + set_ref_list = + annotation_set_ref_lists_map_.CreateAndAddItem(header_->AnnotationSetRefLists(), + eagerly_assign_offsets_, + offset, + annotations); + } + return new ParameterAnnotation(method_id, set_ref_list); +} + } // namespace dex_ir } // namespace art diff --git a/dexlayout/dex_verify.cc b/dexlayout/dex_verify.cc index 2e4756b482..718d66feaa 100644 --- a/dexlayout/dex_verify.cc +++ b/dexlayout/dex_verify.cc @@ -31,38 +31,42 @@ using android::base::StringPrintf; bool VerifyOutputDexFile(dex_ir::Header* orig_header, dex_ir::Header* output_header, std::string* error_msg) { - dex_ir::Collections& orig = orig_header->GetCollections(); - dex_ir::Collections& output = output_header->GetCollections(); - // Compare all id sections. They have a defined order that can't be changed by dexlayout. - if (!VerifyIds(orig.StringIds(), output.StringIds(), "string ids", error_msg) || - !VerifyIds(orig.TypeIds(), output.TypeIds(), "type ids", error_msg) || - !VerifyIds(orig.ProtoIds(), output.ProtoIds(), "proto ids", error_msg) || - !VerifyIds(orig.FieldIds(), output.FieldIds(), "field ids", error_msg) || - !VerifyIds(orig.MethodIds(), output.MethodIds(), "method ids", error_msg)) { + if (!VerifyIds(orig_header->StringIds(), output_header->StringIds(), "string ids", error_msg) || + !VerifyIds(orig_header->TypeIds(), output_header->TypeIds(), "type ids", error_msg) || + !VerifyIds(orig_header->ProtoIds(), output_header->ProtoIds(), "proto ids", error_msg) || + !VerifyIds(orig_header->FieldIds(), output_header->FieldIds(), "field ids", error_msg) || + !VerifyIds(orig_header->MethodIds(), output_header->MethodIds(), "method ids", error_msg)) { return false; } // Compare class defs. The order may have been changed by dexlayout. 
- if (!VerifyClassDefs(orig.ClassDefs(), output.ClassDefs(), error_msg)) { + if (!VerifyClassDefs(orig_header->ClassDefs(), output_header->ClassDefs(), error_msg)) { return false; } return true; } -template<class T> bool VerifyIds(std::vector<std::unique_ptr<T>>& orig, - std::vector<std::unique_ptr<T>>& output, +template<class T> bool VerifyIds(dex_ir::CollectionVector<T>& orig, + dex_ir::CollectionVector<T>& output, const char* section_name, std::string* error_msg) { - if (orig.size() != output.size()) { - *error_msg = StringPrintf( - "Mismatched size for %s section: %zu vs %zu.", section_name, orig.size(), output.size()); - return false; - } - for (size_t i = 0; i < orig.size(); ++i) { - if (!VerifyId(orig[i].get(), output[i].get(), error_msg)) { + auto orig_iter = orig.begin(); + auto output_iter = output.begin(); + for (; orig_iter != orig.end() && output_iter != output.end(); ++orig_iter, ++output_iter) { + if (!VerifyId(orig_iter->get(), output_iter->get(), error_msg)) { return false; } } + if (orig_iter != orig.end() || output_iter != output.end()) { + const char* longer; + if (orig_iter == orig.end()) { + longer = "output"; + } else { + longer = "original"; + } + *error_msg = StringPrintf("Mismatch for %s section: %s is longer.", section_name, longer); + return false; + } return true; } @@ -181,29 +185,36 @@ struct ClassDefCompare { // The class defs may have a new order due to dexlayout. Use the class's class_idx to uniquely // identify them and sort them for comparison. -bool VerifyClassDefs(std::vector<std::unique_ptr<dex_ir::ClassDef>>& orig, - std::vector<std::unique_ptr<dex_ir::ClassDef>>& output, +bool VerifyClassDefs(dex_ir::CollectionVector<dex_ir::ClassDef>& orig, + dex_ir::CollectionVector<dex_ir::ClassDef>& output, std::string* error_msg) { - if (orig.size() != output.size()) { - *error_msg = StringPrintf( - "Mismatched size for class defs section: %zu vs %zu.", orig.size(), output.size()); - return false; - } // Store the class defs into sets sorted by the class's type index. 
std::set<dex_ir::ClassDef*, ClassDefCompare> orig_set; std::set<dex_ir::ClassDef*, ClassDefCompare> output_set; - for (size_t i = 0; i < orig.size(); ++i) { - orig_set.insert(orig[i].get()); - output_set.insert(output[i].get()); - } - auto orig_iter = orig_set.begin(); - auto output_iter = output_set.begin(); - while (orig_iter != orig_set.end() && output_iter != output_set.end()) { - if (!VerifyClassDef(*orig_iter, *output_iter, error_msg)) { + auto orig_iter = orig.begin(); + auto output_iter = output.begin(); + for (; orig_iter != orig.end() && output_iter != output.end(); ++orig_iter, ++output_iter) { + orig_set.insert(orig_iter->get()); + output_set.insert(output_iter->get()); + } + if (orig_iter != orig.end() || output_iter != output.end()) { + const char* longer; + if (orig_iter == orig.end()) { + longer = "output"; + } else { + longer = "original"; + } + *error_msg = StringPrintf("Mismatch for class defs section: %s is longer.", longer); + return false; + } + auto orig_set_iter = orig_set.begin(); + auto output_set_iter = output_set.begin(); + while (orig_set_iter != orig_set.end() && output_set_iter != output_set.end()) { + if (!VerifyClassDef(*orig_set_iter, *output_set_iter, error_msg)) { return false; } - orig_iter++; - output_iter++; + orig_set_iter++; + output_set_iter++; } return true; } @@ -907,7 +918,7 @@ bool VerifyDebugInfo(dex_ir::DebugInfoItem* orig, *error_msg = "DebugInfo null/non-null mismatch."; return false; } - if (memcmp(orig_data, output_data, orig_size) != 0) { + if (orig_data != nullptr && memcmp(orig_data, output_data, orig_size) != 0) { *error_msg = "DebugInfo bytes mismatch."; return false; } diff --git a/dexlayout/dex_verify.h b/dexlayout/dex_verify.h index 998939bbce..4943defe16 100644 --- a/dexlayout/dex_verify.h +++ b/dexlayout/dex_verify.h @@ -30,8 +30,8 @@ bool VerifyOutputDexFile(dex_ir::Header* orig_header, dex_ir::Header* output_header, std::string* error_msg); -template<class T> bool VerifyIds(std::vector<std::unique_ptr<T>>& orig, - std::vector<std::unique_ptr<T>>& output, +template<class T> bool VerifyIds(dex_ir::CollectionVector<T>& orig, + dex_ir::CollectionVector<T>& output, const char* section_name, std::string* error_msg); bool VerifyId(dex_ir::StringId* orig, dex_ir::StringId* output, std::string* error_msg); @@ -40,8 +40,8 @@ bool VerifyId(dex_ir::ProtoId* orig, dex_ir::ProtoId* output, std::string* error bool VerifyId(dex_ir::FieldId* orig, dex_ir::FieldId* output, std::string* error_msg); bool VerifyId(dex_ir::MethodId* orig, dex_ir::MethodId* output, std::string* error_msg); -bool VerifyClassDefs(std::vector<std::unique_ptr<dex_ir::ClassDef>>& orig, - std::vector<std::unique_ptr<dex_ir::ClassDef>>& output, +bool VerifyClassDefs(dex_ir::CollectionVector<dex_ir::ClassDef>& orig, + dex_ir::CollectionVector<dex_ir::ClassDef>& output, std::string* error_msg); bool VerifyClassDef(dex_ir::ClassDef* orig, dex_ir::ClassDef* output, std::string* error_msg); diff --git a/dexlayout/dex_visualize.cc b/dexlayout/dex_visualize.cc index 0e04c587e7..abcaffc434 100644 --- a/dexlayout/dex_visualize.cc +++ b/dexlayout/dex_visualize.cc @@ -252,9 +252,9 @@ void VisualizeDexLayout(dex_ir::Header* header, return; } - const uint32_t class_defs_size = header->GetCollections().ClassDefsSize(); + const uint32_t class_defs_size = header->ClassDefs().Size(); for (uint32_t class_index = 0; class_index < class_defs_size; class_index++) { - dex_ir::ClassDef* class_def = header->GetCollections().GetClassDef(class_index); + dex_ir::ClassDef* class_def = 
header->ClassDefs()[class_index]; dex::TypeIndex type_idx(class_def->ClassType()->GetIndex()); if (profile_info != nullptr && !profile_info->ContainsClass(*dex_file, type_idx)) { continue; diff --git a/dexlayout/dex_writer.cc b/dexlayout/dex_writer.cc index 9ed1312983..a4c5cda4ba 100644 --- a/dexlayout/dex_writer.cc +++ b/dexlayout/dex_writer.cc @@ -231,7 +231,7 @@ void DexWriter::WriteEncodedMethods(Stream* stream, dex_ir::MethodItemVector* me // function that takes a CollectionVector<T> and uses overloading. void DexWriter::WriteStringIds(Stream* stream, bool reserve_only) { const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::StringId>& string_id : header_->GetCollections().StringIds()) { + for (auto& string_id : header_->StringIds()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeStringIdItem)); if (reserve_only) { stream->Skip(string_id->GetSize()); @@ -241,7 +241,7 @@ void DexWriter::WriteStringIds(Stream* stream, bool reserve_only) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetStringIdsOffset(start); + header_->StringIds().SetOffset(start); } } @@ -256,25 +256,25 @@ void DexWriter::WriteStringData(Stream* stream, dex_ir::StringData* string_data) void DexWriter::WriteStringDatas(Stream* stream) { const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::StringData>& string_data : header_->GetCollections().StringDatas()) { + for (auto& string_data : header_->StringDatas()) { WriteStringData(stream, string_data.get()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetStringDatasOffset(start); + header_->StringDatas().SetOffset(start); } } void DexWriter::WriteTypeIds(Stream* stream) { uint32_t descriptor_idx[1]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::TypeId>& type_id : header_->GetCollections().TypeIds()) { + for (auto& type_id : header_->TypeIds()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeTypeIdItem)); ProcessOffset(stream, type_id.get()); descriptor_idx[0] = type_id->GetStringId()->GetIndex(); stream->Write(descriptor_idx, type_id->GetSize()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetTypeIdsOffset(start); + header_->TypeIds().SetOffset(start); } } @@ -282,7 +282,7 @@ void DexWriter::WriteTypeLists(Stream* stream) { uint32_t size[1]; uint16_t list[1]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::TypeList>& type_list : header_->GetCollections().TypeLists()) { + for (auto& type_list : header_->TypeLists()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeTypeList)); size[0] = type_list->GetTypeList()->size(); ProcessOffset(stream, type_list.get()); @@ -293,14 +293,14 @@ void DexWriter::WriteTypeLists(Stream* stream) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetTypeListsOffset(start); + header_->TypeLists().SetOffset(start); } } void DexWriter::WriteProtoIds(Stream* stream, bool reserve_only) { uint32_t buffer[3]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::ProtoId>& proto_id : header_->GetCollections().ProtoIds()) { + for (auto& proto_id : header_->ProtoIds()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeProtoIdItem)); ProcessOffset(stream, proto_id.get()); if (reserve_only) { @@ -313,14 +313,14 @@ void DexWriter::WriteProtoIds(Stream* stream, bool reserve_only) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetProtoIdsOffset(start); + 
header_->ProtoIds().SetOffset(start); } } void DexWriter::WriteFieldIds(Stream* stream) { uint16_t buffer[4]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::FieldId>& field_id : header_->GetCollections().FieldIds()) { + for (auto& field_id : header_->FieldIds()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeFieldIdItem)); ProcessOffset(stream, field_id.get()); buffer[0] = field_id->Class()->GetIndex(); @@ -330,14 +330,14 @@ void DexWriter::WriteFieldIds(Stream* stream) { stream->Write(buffer, field_id->GetSize()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetFieldIdsOffset(start); + header_->FieldIds().SetOffset(start); } } void DexWriter::WriteMethodIds(Stream* stream) { uint16_t buffer[4]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::MethodId>& method_id : header_->GetCollections().MethodIds()) { + for (auto& method_id : header_->MethodIds()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeMethodIdItem)); ProcessOffset(stream, method_id.get()); buffer[0] = method_id->Class()->GetIndex(); @@ -347,28 +347,26 @@ void DexWriter::WriteMethodIds(Stream* stream) { stream->Write(buffer, method_id->GetSize()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetMethodIdsOffset(start); + header_->MethodIds().SetOffset(start); } } void DexWriter::WriteEncodedArrays(Stream* stream) { const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::EncodedArrayItem>& encoded_array : - header_->GetCollections().EncodedArrayItems()) { + for (auto& encoded_array : header_->EncodedArrayItems()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeEncodedArrayItem)); ProcessOffset(stream, encoded_array.get()); WriteEncodedArray(stream, encoded_array->GetEncodedValues()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetEncodedArrayItemsOffset(start); + header_->EncodedArrayItems().SetOffset(start); } } void DexWriter::WriteAnnotations(Stream* stream) { uint8_t visibility[1]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::AnnotationItem>& annotation : - header_->GetCollections().AnnotationItems()) { + for (auto& annotation : header_->AnnotationItems()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeAnnotationItem)); visibility[0] = annotation->GetVisibility(); ProcessOffset(stream, annotation.get()); @@ -376,7 +374,7 @@ void DexWriter::WriteAnnotations(Stream* stream) { WriteEncodedAnnotation(stream, annotation->GetAnnotation()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetAnnotationItemsOffset(start); + header_->AnnotationItems().SetOffset(start); } } @@ -384,8 +382,7 @@ void DexWriter::WriteAnnotationSets(Stream* stream) { uint32_t size[1]; uint32_t annotation_off[1]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::AnnotationSetItem>& annotation_set : - header_->GetCollections().AnnotationSetItems()) { + for (auto& annotation_set : header_->AnnotationSetItems()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeAnnotationSetItem)); size[0] = annotation_set->GetItems()->size(); ProcessOffset(stream, annotation_set.get()); @@ -396,7 +393,7 @@ void DexWriter::WriteAnnotationSets(Stream* stream) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetAnnotationSetItemsOffset(start); + header_->AnnotationSetItems().SetOffset(start); } } @@ -404,8 +401,7 @@ void DexWriter::WriteAnnotationSetRefs(Stream* stream) { uint32_t 
size[1]; uint32_t annotations_off[1]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::AnnotationSetRefList>& annotation_set_ref : - header_->GetCollections().AnnotationSetRefLists()) { + for (auto& annotation_set_ref : header_->AnnotationSetRefLists()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeAnnotationSetRefList)); size[0] = annotation_set_ref->GetItems()->size(); ProcessOffset(stream, annotation_set_ref.get()); @@ -416,7 +412,7 @@ void DexWriter::WriteAnnotationSetRefs(Stream* stream) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetAnnotationSetRefListsOffset(start); + header_->AnnotationSetRefLists().SetOffset(start); } } @@ -424,8 +420,7 @@ void DexWriter::WriteAnnotationsDirectories(Stream* stream) { uint32_t directory_buffer[4]; uint32_t annotation_buffer[2]; const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::AnnotationsDirectoryItem>& annotations_directory : - header_->GetCollections().AnnotationsDirectoryItems()) { + for (auto& annotations_directory : header_->AnnotationsDirectoryItems()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeAnnotationsDirectoryItem)); ProcessOffset(stream, annotations_directory.get()); directory_buffer[0] = annotations_directory->GetClassAnnotation() == nullptr ? 0 : @@ -463,7 +458,7 @@ void DexWriter::WriteAnnotationsDirectories(Stream* stream) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetAnnotationsDirectoryItemsOffset(start); + header_->AnnotationsDirectoryItems().SetOffset(start); } } @@ -475,12 +470,11 @@ void DexWriter::WriteDebugInfoItem(Stream* stream, dex_ir::DebugInfoItem* debug_ void DexWriter::WriteDebugInfoItems(Stream* stream) { const uint32_t start = stream->Tell(); - for (std::unique_ptr<dex_ir::DebugInfoItem>& debug_info : - header_->GetCollections().DebugInfoItems()) { + for (auto& debug_info : header_->DebugInfoItems()) { WriteDebugInfoItem(stream, debug_info.get()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetDebugInfoItemsOffset(start); + header_->DebugInfoItems().SetOffset(start); } } @@ -558,7 +552,7 @@ void DexWriter::WriteCodeItems(Stream* stream, bool reserve_only) { DexLayoutSections::SectionType::kSectionTypeCode)]; } const uint32_t start = stream->Tell(); - for (auto& code_item : header_->GetCollections().CodeItems()) { + for (auto& code_item : header_->CodeItems()) { uint32_t start_offset = stream->Tell(); WriteCodeItem(stream, code_item.get(), reserve_only); // Only add the section hotness info once. 
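Each DexWriter::Write* method above follows the same section-writing shape, and the change keeps that shape while moving offset bookkeeping from the Collections accessors onto the per-section vectors exposed directly by the header. A minimal sketch of the pattern, with Foo, kDexTypeFooItem, and WriteFooPayload as hypothetical placeholder names rather than identifiers from this change:

void DexWriter::WriteFoos(Stream* stream) {
  const uint32_t start = stream->Tell();
  for (auto& foo : header_->Foos()) {
    // Align per item type before emitting each item.
    stream->AlignTo(SectionAlignment(DexFile::kDexTypeFooItem));
    // Record the item's offset (or seek to a precomputed one).
    ProcessOffset(stream, foo.get());
    WriteFooPayload(stream, foo.get());  // hypothetical payload writer
  }
  // Record the section offset only if anything was actually written.
  if (compute_offsets_ && start != stream->Tell()) {
    header_->Foos().SetOffset(start);
  }
}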
@@ -573,14 +567,14 @@ void DexWriter::WriteCodeItems(Stream* stream, bool reserve_only) { } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetCodeItemsOffset(start); + header_->CodeItems().SetOffset(start); } } void DexWriter::WriteClassDefs(Stream* stream, bool reserve_only) { const uint32_t start = stream->Tell(); uint32_t class_def_buffer[8]; - for (std::unique_ptr<dex_ir::ClassDef>& class_def : header_->GetCollections().ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeClassDefItem)); if (reserve_only) { stream->Skip(class_def->GetSize()); @@ -602,14 +596,14 @@ void DexWriter::WriteClassDefs(Stream* stream, bool reserve_only) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetClassDefsOffset(start); + header_->ClassDefs().SetOffset(start); } } void DexWriter::WriteClassDatas(Stream* stream) { const uint32_t start = stream->Tell(); for (const std::unique_ptr<dex_ir::ClassData>& class_data : - header_->GetCollections().ClassDatas()) { + header_->ClassDatas()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeClassDataItem)); ProcessOffset(stream, class_data.get()); stream->WriteUleb128(class_data->StaticFields()->size()); @@ -622,15 +616,14 @@ void DexWriter::WriteClassDatas(Stream* stream) { WriteEncodedMethods(stream, class_data->VirtualMethods()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetClassDatasOffset(start); + header_->ClassDatas().SetOffset(start); } } void DexWriter::WriteCallSiteIds(Stream* stream, bool reserve_only) { const uint32_t start = stream->Tell(); uint32_t call_site_off[1]; - for (std::unique_ptr<dex_ir::CallSiteId>& call_site_id : - header_->GetCollections().CallSiteIds()) { + for (auto& call_site_id : header_->CallSiteIds()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeCallSiteIdItem)); if (reserve_only) { stream->Skip(call_site_id->GetSize()); @@ -640,15 +633,14 @@ void DexWriter::WriteCallSiteIds(Stream* stream, bool reserve_only) { } } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetCallSiteIdsOffset(start); + header_->CallSiteIds().SetOffset(start); } } void DexWriter::WriteMethodHandles(Stream* stream) { const uint32_t start = stream->Tell(); uint16_t method_handle_buff[4]; - for (std::unique_ptr<dex_ir::MethodHandleItem>& method_handle : - header_->GetCollections().MethodHandleItems()) { + for (auto& method_handle : header_->MethodHandleItems()) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeMethodHandleItem)); method_handle_buff[0] = static_cast<uint16_t>(method_handle->GetMethodHandleType()); method_handle_buff[1] = 0; // unused. @@ -657,7 +649,7 @@ void DexWriter::WriteMethodHandles(Stream* stream) { stream->Write(method_handle_buff, method_handle->GetSize()); } if (compute_offsets_ && start != stream->Tell()) { - header_->GetCollections().SetMethodHandleItemsOffset(start); + header_->MethodHandleItems().SetOffset(start); } } @@ -678,67 +670,66 @@ void DexWriter::WriteMapItems(Stream* stream, MapItemQueue* queue) { } void DexWriter::GenerateAndWriteMapItems(Stream* stream) { - dex_ir::Collections& collection = header_->GetCollections(); MapItemQueue queue; // Header and index section. 
queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeHeaderItem, 1, 0)); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeStringIdItem, - collection.StringIdsSize(), - collection.StringIdsOffset())); + header_->StringIds().Size(), + header_->StringIds().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeTypeIdItem, - collection.TypeIdsSize(), - collection.TypeIdsOffset())); + header_->TypeIds().Size(), + header_->TypeIds().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeProtoIdItem, - collection.ProtoIdsSize(), - collection.ProtoIdsOffset())); + header_->ProtoIds().Size(), + header_->ProtoIds().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeFieldIdItem, - collection.FieldIdsSize(), - collection.FieldIdsOffset())); + header_->FieldIds().Size(), + header_->FieldIds().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeMethodIdItem, - collection.MethodIdsSize(), - collection.MethodIdsOffset())); + header_->MethodIds().Size(), + header_->MethodIds().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeClassDefItem, - collection.ClassDefsSize(), - collection.ClassDefsOffset())); + header_->ClassDefs().Size(), + header_->ClassDefs().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeCallSiteIdItem, - collection.CallSiteIdsSize(), - collection.CallSiteIdsOffset())); + header_->CallSiteIds().Size(), + header_->CallSiteIds().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeMethodHandleItem, - collection.MethodHandleItemsSize(), - collection.MethodHandleItemsOffset())); + header_->MethodHandleItems().Size(), + header_->MethodHandleItems().GetOffset())); // Data section. - queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeMapList, 1, collection.MapListOffset())); + queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeMapList, 1, header_->MapListOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeTypeList, - collection.TypeListsSize(), - collection.TypeListsOffset())); + header_->TypeLists().Size(), + header_->TypeLists().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeAnnotationSetRefList, - collection.AnnotationSetRefListsSize(), - collection.AnnotationSetRefListsOffset())); + header_->AnnotationSetRefLists().Size(), + header_->AnnotationSetRefLists().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeAnnotationSetItem, - collection.AnnotationSetItemsSize(), - collection.AnnotationSetItemsOffset())); + header_->AnnotationSetItems().Size(), + header_->AnnotationSetItems().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeClassDataItem, - collection.ClassDatasSize(), - collection.ClassDatasOffset())); + header_->ClassDatas().Size(), + header_->ClassDatas().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeCodeItem, - collection.CodeItemsSize(), - collection.CodeItemsOffset())); + header_->CodeItems().Size(), + header_->CodeItems().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeStringDataItem, - collection.StringDatasSize(), - collection.StringDatasOffset())); + header_->StringDatas().Size(), + header_->StringDatas().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeDebugInfoItem, - collection.DebugInfoItemsSize(), - collection.DebugInfoItemsOffset())); + header_->DebugInfoItems().Size(), + header_->DebugInfoItems().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeAnnotationItem, - collection.AnnotationItemsSize(), - collection.AnnotationItemsOffset())); + header_->AnnotationItems().Size(), + header_->AnnotationItems().GetOffset())); 
queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeEncodedArrayItem, - collection.EncodedArrayItemsSize(), - collection.EncodedArrayItemsOffset())); + header_->EncodedArrayItems().Size(), + header_->EncodedArrayItems().GetOffset())); queue.AddIfNotEmpty(MapItem(DexFile::kDexTypeAnnotationsDirectoryItem, - collection.AnnotationsDirectoryItemsSize(), - collection.AnnotationsDirectoryItemsOffset())); + header_->AnnotationsDirectoryItems().Size(), + header_->AnnotationsDirectoryItems().GetOffset())); WriteMapItems(stream, &queue); } @@ -761,20 +752,19 @@ void DexWriter::WriteHeader(Stream* stream) { header.endian_tag_ = header_->EndianTag(); header.link_size_ = header_->LinkSize(); header.link_off_ = header_->LinkOffset(); - const dex_ir::Collections& collections = header_->GetCollections(); - header.map_off_ = collections.MapListOffset(); - header.string_ids_size_ = collections.StringIdsSize(); - header.string_ids_off_ = collections.StringIdsOffset(); - header.type_ids_size_ = collections.TypeIdsSize(); - header.type_ids_off_ = collections.TypeIdsOffset(); - header.proto_ids_size_ = collections.ProtoIdsSize(); - header.proto_ids_off_ = collections.ProtoIdsOffset(); - header.field_ids_size_ = collections.FieldIdsSize(); - header.field_ids_off_ = collections.FieldIdsOffset(); - header.method_ids_size_ = collections.MethodIdsSize(); - header.method_ids_off_ = collections.MethodIdsOffset(); - header.class_defs_size_ = collections.ClassDefsSize(); - header.class_defs_off_ = collections.ClassDefsOffset(); + header.map_off_ = header_->MapListOffset(); + header.string_ids_size_ = header_->StringIds().Size(); + header.string_ids_off_ = header_->StringIds().GetOffset(); + header.type_ids_size_ = header_->TypeIds().Size(); + header.type_ids_off_ = header_->TypeIds().GetOffset(); + header.proto_ids_size_ = header_->ProtoIds().Size(); + header.proto_ids_off_ = header_->ProtoIds().GetOffset(); + header.field_ids_size_ = header_->FieldIds().Size(); + header.field_ids_off_ = header_->FieldIds().GetOffset(); + header.method_ids_size_ = header_->MethodIds().Size(); + header.method_ids_off_ = header_->MethodIds().GetOffset(); + header.class_defs_size_ = header_->ClassDefs().Size(); + header.class_defs_off_ = header_->ClassDefs().GetOffset(); header.data_size_ = header_->DataSize(); header.data_off_ = header_->DataOffset(); @@ -797,8 +787,6 @@ bool DexWriter::Write(DexContainer* output, std::string* error_msg) { // Starting offset is right after the header. stream->Seek(GetHeaderSize()); - dex_ir::Collections& collection = header_->GetCollections(); - // Based on: https://source.android.com/devices/tech/dalvik/dex-format // Since the offsets may not be calculated already, the writing must be done in the correct order. const uint32_t string_ids_offset = stream->Tell(); @@ -863,9 +851,9 @@ bool DexWriter::Write(DexContainer* output, std::string* error_msg) { // Write the map list. if (compute_offsets_) { stream->AlignTo(SectionAlignment(DexFile::kDexTypeMapList)); - collection.SetMapListOffset(stream->Tell()); + header_->SetMapListOffset(stream->Tell()); } else { - stream->Seek(collection.MapListOffset()); + stream->Seek(header_->MapListOffset()); } GenerateAndWriteMapItems(stream); stream->AlignTo(kDataSectionAlignment); @@ -882,7 +870,7 @@ bool DexWriter::Write(DexContainer* output, std::string* error_msg) { } // Write link data if it exists. 
- const std::vector<uint8_t>& link_data = collection.LinkData(); + const std::vector<uint8_t>& link_data = header_->LinkData(); if (link_data.size() > 0) { CHECK_EQ(header_->LinkSize(), static_cast<uint32_t>(link_data.size())); if (compute_offsets_) { diff --git a/dexlayout/dexlayout.cc b/dexlayout/dexlayout.cc index 39d93bfc77..d6dd9d1829 100644 --- a/dexlayout/dexlayout.cc +++ b/dexlayout/dexlayout.cc @@ -417,24 +417,24 @@ static std::unique_ptr<char[]> IndexString(dex_ir::Header* header, outSize = snprintf(buf.get(), buf_size, "<no-index>"); break; case Instruction::kIndexTypeRef: - if (index < header->GetCollections().TypeIdsSize()) { - const char* tp = header->GetCollections().GetTypeId(index)->GetStringId()->Data(); + if (index < header->TypeIds().Size()) { + const char* tp = header->TypeIds()[index]->GetStringId()->Data(); outSize = snprintf(buf.get(), buf_size, "%s // type@%0*x", tp, width, index); } else { outSize = snprintf(buf.get(), buf_size, "<type?> // type@%0*x", width, index); } break; case Instruction::kIndexStringRef: - if (index < header->GetCollections().StringIdsSize()) { - const char* st = header->GetCollections().GetStringId(index)->Data(); + if (index < header->StringIds().Size()) { + const char* st = header->StringIds()[index]->Data(); outSize = snprintf(buf.get(), buf_size, "\"%s\" // string@%0*x", st, width, index); } else { outSize = snprintf(buf.get(), buf_size, "<string?> // string@%0*x", width, index); } break; case Instruction::kIndexMethodRef: - if (index < header->GetCollections().MethodIdsSize()) { - dex_ir::MethodId* method_id = header->GetCollections().GetMethodId(index); + if (index < header->MethodIds().Size()) { + dex_ir::MethodId* method_id = header->MethodIds()[index]; const char* name = method_id->Name()->Data(); std::string type_descriptor = GetSignatureForProtoId(method_id->Proto()); const char* back_descriptor = method_id->Class()->GetStringId()->Data(); @@ -445,8 +445,8 @@ static std::unique_ptr<char[]> IndexString(dex_ir::Header* header, } break; case Instruction::kIndexFieldRef: - if (index < header->GetCollections().FieldIdsSize()) { - dex_ir::FieldId* field_id = header->GetCollections().GetFieldId(index); + if (index < header->FieldIds().Size()) { + dex_ir::FieldId* field_id = header->FieldIds()[index]; const char* name = field_id->Name()->Data(); const char* type_descriptor = field_id->Type()->GetStringId()->Data(); const char* back_descriptor = field_id->Class()->GetStringId()->Data(); @@ -466,15 +466,15 @@ static std::unique_ptr<char[]> IndexString(dex_ir::Header* header, case Instruction::kIndexMethodAndProtoRef: { std::string method("<method?>"); std::string proto("<proto?>"); - if (index < header->GetCollections().MethodIdsSize()) { - dex_ir::MethodId* method_id = header->GetCollections().GetMethodId(index); + if (index < header->MethodIds().Size()) { + dex_ir::MethodId* method_id = header->MethodIds()[index]; const char* name = method_id->Name()->Data(); std::string type_descriptor = GetSignatureForProtoId(method_id->Proto()); const char* back_descriptor = method_id->Class()->GetStringId()->Data(); method = StringPrintf("%s.%s:%s", back_descriptor, name, type_descriptor.c_str()); } - if (secondary_index < header->GetCollections().ProtoIdsSize()) { - dex_ir::ProtoId* proto_id = header->GetCollections().GetProtoId(secondary_index); + if (secondary_index < header->ProtoIds().Size()) { + dex_ir::ProtoId* proto_id = header->ProtoIds()[secondary_index]; proto = GetSignatureForProtoId(proto_id); } outSize = snprintf(buf.get(), buf_size, 
"%s, %s // method@%0*x, proto@%0*x", @@ -596,7 +596,6 @@ void DexLayout::DumpEncodedValue(const dex_ir::EncodedValue* data) { */ void DexLayout::DumpFileHeader() { char sanitized[8 * 2 + 1]; - dex_ir::Collections& collections = header_->GetCollections(); fprintf(out_file_, "DEX file header:\n"); Asciify(sanitized, header_->Magic(), 8); fprintf(out_file_, "magic : '%s'\n", sanitized); @@ -610,24 +609,24 @@ void DexLayout::DumpFileHeader() { fprintf(out_file_, "link_size : %d\n", header_->LinkSize()); fprintf(out_file_, "link_off : %d (0x%06x)\n", header_->LinkOffset(), header_->LinkOffset()); - fprintf(out_file_, "string_ids_size : %d\n", collections.StringIdsSize()); + fprintf(out_file_, "string_ids_size : %d\n", header_->StringIds().Size()); fprintf(out_file_, "string_ids_off : %d (0x%06x)\n", - collections.StringIdsOffset(), collections.StringIdsOffset()); - fprintf(out_file_, "type_ids_size : %d\n", collections.TypeIdsSize()); + header_->StringIds().GetOffset(), header_->StringIds().GetOffset()); + fprintf(out_file_, "type_ids_size : %d\n", header_->TypeIds().Size()); fprintf(out_file_, "type_ids_off : %d (0x%06x)\n", - collections.TypeIdsOffset(), collections.TypeIdsOffset()); - fprintf(out_file_, "proto_ids_size : %d\n", collections.ProtoIdsSize()); + header_->TypeIds().GetOffset(), header_->TypeIds().GetOffset()); + fprintf(out_file_, "proto_ids_size : %d\n", header_->ProtoIds().Size()); fprintf(out_file_, "proto_ids_off : %d (0x%06x)\n", - collections.ProtoIdsOffset(), collections.ProtoIdsOffset()); - fprintf(out_file_, "field_ids_size : %d\n", collections.FieldIdsSize()); + header_->ProtoIds().GetOffset(), header_->ProtoIds().GetOffset()); + fprintf(out_file_, "field_ids_size : %d\n", header_->FieldIds().Size()); fprintf(out_file_, "field_ids_off : %d (0x%06x)\n", - collections.FieldIdsOffset(), collections.FieldIdsOffset()); - fprintf(out_file_, "method_ids_size : %d\n", collections.MethodIdsSize()); + header_->FieldIds().GetOffset(), header_->FieldIds().GetOffset()); + fprintf(out_file_, "method_ids_size : %d\n", header_->MethodIds().Size()); fprintf(out_file_, "method_ids_off : %d (0x%06x)\n", - collections.MethodIdsOffset(), collections.MethodIdsOffset()); - fprintf(out_file_, "class_defs_size : %d\n", collections.ClassDefsSize()); + header_->MethodIds().GetOffset(), header_->MethodIds().GetOffset()); + fprintf(out_file_, "class_defs_size : %d\n", header_->ClassDefs().Size()); fprintf(out_file_, "class_defs_off : %d (0x%06x)\n", - collections.ClassDefsOffset(), collections.ClassDefsOffset()); + header_->ClassDefs().GetOffset(), header_->ClassDefs().GetOffset()); fprintf(out_file_, "data_size : %d\n", header_->DataSize()); fprintf(out_file_, "data_off : %d (0x%06x)\n\n", header_->DataOffset(), header_->DataOffset()); @@ -638,7 +637,7 @@ void DexLayout::DumpFileHeader() { */ void DexLayout::DumpClassDef(int idx) { // General class information. - dex_ir::ClassDef* class_def = header_->GetCollections().GetClassDef(idx); + dex_ir::ClassDef* class_def = header_->ClassDefs()[idx]; fprintf(out_file_, "Class #%d header:\n", idx); fprintf(out_file_, "class_idx : %d\n", class_def->ClassType()->GetIndex()); fprintf(out_file_, "access_flags : %d (0x%04x)\n", @@ -719,7 +718,7 @@ void DexLayout::DumpAnnotationSetItem(dex_ir::AnnotationSetItem* set_item) { * Dumps class annotations. 
*/ void DexLayout::DumpClassAnnotations(int idx) { - dex_ir::ClassDef* class_def = header_->GetCollections().GetClassDef(idx); + dex_ir::ClassDef* class_def = header_->ClassDefs()[idx]; dex_ir::AnnotationsDirectoryItem* annotations_directory = class_def->Annotations(); if (annotations_directory == nullptr) { return; // none @@ -1039,7 +1038,7 @@ void DexLayout::DumpInstruction(const dex_ir::CodeItem* code, * Dumps a bytecode disassembly. */ void DexLayout::DumpBytecodes(uint32_t idx, const dex_ir::CodeItem* code, uint32_t code_offset) { - dex_ir::MethodId* method_id = header_->GetCollections().GetMethodId(idx); + dex_ir::MethodId* method_id = header_->MethodIds()[idx]; const char* name = method_id->Name()->Data(); std::string type_descriptor = GetSignatureForProtoId(method_id->Proto()); const char* back_descriptor = method_id->Class()->GetStringId()->Data(); @@ -1083,16 +1082,16 @@ static void DumpLocalsCb(void* context, const DexFile::LocalInfo& entry) { /* * Lookup functions. */ -static const char* StringDataByIdx(uint32_t idx, dex_ir::Collections& collections) { - dex_ir::StringId* string_id = collections.GetStringIdOrNullPtr(idx); +static const char* StringDataByIdx(uint32_t idx, dex_ir::Header* header) { + dex_ir::StringId* string_id = header->GetStringIdOrNullPtr(idx); if (string_id == nullptr) { return nullptr; } return string_id->Data(); } -static const char* StringDataByTypeIdx(uint16_t idx, dex_ir::Collections& collections) { - dex_ir::TypeId* type_id = collections.GetTypeIdOrNullPtr(idx); +static const char* StringDataByTypeIdx(uint16_t idx, dex_ir::Header* header) { + dex_ir::TypeId* type_id = header->GetTypeIdOrNullPtr(idx); if (type_id == nullptr) { return nullptr; } @@ -1134,7 +1133,7 @@ void DexLayout::DumpCode(uint32_t idx, if (debug_info != nullptr) { DexFile::DecodeDebugPositionInfo(debug_info->GetDebugInfo(), [this](uint32_t idx) { - return StringDataByIdx(idx, this->header_->GetCollections()); + return StringDataByIdx(idx, this->header_); }, DumpPositionsCb, out_file_); @@ -1161,12 +1160,12 @@ void DexLayout::DumpCode(uint32_t idx, code->InsSize(), code->InsnsSize(), [this](uint32_t idx) { - return StringDataByIdx(idx, this->header_->GetCollections()); + return StringDataByIdx(idx, this->header_); }, [this](uint32_t idx) { return StringDataByTypeIdx(dchecked_integral_cast<uint16_t>(idx), - this->header_->GetCollections()); + this->header_); }, DumpLocalsCb, out_file_); @@ -1182,7 +1181,7 @@ void DexLayout::DumpMethod(uint32_t idx, uint32_t flags, const dex_ir::CodeItem* return; } - dex_ir::MethodId* method_id = header_->GetCollections().GetMethodId(idx); + dex_ir::MethodId* method_id = header_->MethodIds()[idx]; const char* name = method_id->Name()->Data(); char* type_descriptor = strdup(GetSignatureForProtoId(method_id->Proto()).c_str()); const char* back_descriptor = method_id->Class()->GetStringId()->Data(); @@ -1292,7 +1291,7 @@ void DexLayout::DumpSField(uint32_t idx, uint32_t flags, int i, dex_ir::EncodedV return; } - dex_ir::FieldId* field_id = header_->GetCollections().GetFieldId(idx); + dex_ir::FieldId* field_id = header_->FieldIds()[idx]; const char* name = field_id->Name()->Data(); const char* type_descriptor = field_id->Type()->GetStringId()->Data(); const char* back_descriptor = field_id->Class()->GetStringId()->Data(); @@ -1346,7 +1345,7 @@ void DexLayout::DumpIField(uint32_t idx, uint32_t flags, int i) { * the value will be replaced with a newly-allocated string. 
*/ void DexLayout::DumpClass(int idx, char** last_package) { - dex_ir::ClassDef* class_def = header_->GetCollections().GetClassDef(idx); + dex_ir::ClassDef* class_def = header_->ClassDefs()[idx]; // Omitting non-public class. if (options_.exports_only_ && (class_def->GetAccessFlags() & kAccPublic) == 0) { return; @@ -1364,8 +1363,7 @@ void DexLayout::DumpClass(int idx, char** last_package) { // up the classes, sort them, and dump them alphabetically so the // package name wouldn't jump around, but that's not a great plan // for something that needs to run on the device. - const char* class_descriptor = - header_->GetCollections().GetClassDef(idx)->ClassType()->GetStringId()->Data(); + const char* class_descriptor = header_->ClassDefs()[idx]->ClassType()->GetStringId()->Data(); if (!(class_descriptor[0] == 'L' && class_descriptor[strlen(class_descriptor)-1] == ';')) { // Arrays and primitives should not be defined explicitly. Keep going? @@ -1543,7 +1541,7 @@ void DexLayout::DumpDexFile() { // Iterate over all classes. char* package = nullptr; - const uint32_t class_defs_size = header_->GetCollections().ClassDefsSize(); + const uint32_t class_defs_size = header_->ClassDefs().Size(); for (uint32_t i = 0; i < class_defs_size; i++) { DumpClass(i, &package); } // for @@ -1562,13 +1560,13 @@ void DexLayout::DumpDexFile() { void DexLayout::LayoutClassDefsAndClassData(const DexFile* dex_file) { std::vector<dex_ir::ClassDef*> new_class_def_order; - for (std::unique_ptr<dex_ir::ClassDef>& class_def : header_->GetCollections().ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { dex::TypeIndex type_idx(class_def->ClassType()->GetIndex()); if (info_->ContainsClass(*dex_file, type_idx)) { new_class_def_order.push_back(class_def.get()); } } - for (std::unique_ptr<dex_ir::ClassDef>& class_def : header_->GetCollections().ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { dex::TypeIndex type_idx(class_def->ClassType()->GetIndex()); if (!info_->ContainsClass(*dex_file, type_idx)) { new_class_def_order.push_back(class_def.get()); @@ -1576,8 +1574,7 @@ void DexLayout::LayoutClassDefsAndClassData(const DexFile* dex_file) { } std::unordered_set<dex_ir::ClassData*> visited_class_data; size_t class_data_index = 0; - dex_ir::CollectionVector<dex_ir::ClassData>::Vector& class_datas = - header_->GetCollections().ClassDatas(); + auto& class_datas = header_->ClassDatas(); for (dex_ir::ClassDef* class_def : new_class_def_order) { dex_ir::ClassData* class_data = class_def->GetClassData(); if (class_data != nullptr && visited_class_data.find(class_data) == visited_class_data.end()) { @@ -1590,15 +1587,14 @@ void DexLayout::LayoutClassDefsAndClassData(const DexFile* dex_file) { ++class_data_index; } } - CHECK_EQ(class_data_index, class_datas.size()); + CHECK_EQ(class_data_index, class_datas.Size()); if (DexLayout::kChangeClassDefOrder) { // This currently produces dex files that violate the spec since the super class class_def is // supposed to occur before any subclasses. 
- dex_ir::CollectionVector<dex_ir::ClassDef>::Vector& class_defs = - header_->GetCollections().ClassDefs(); - CHECK_EQ(new_class_def_order.size(), class_defs.size()); - for (size_t i = 0; i < class_defs.size(); ++i) { + dex_ir::CollectionVector<dex_ir::ClassDef>& class_defs = header_->ClassDefs(); + CHECK_EQ(new_class_def_order.size(), class_defs.Size()); + for (size_t i = 0; i < class_defs.Size(); ++i) { // Overwrite the existing vector with the new ordering, note that the sets of objects are // equivalent, but the order changes. This is why this is not a memory leak. // TODO: Consider cleaning this up with a shared_ptr. @@ -1609,10 +1605,10 @@ void DexLayout::LayoutClassDefsAndClassData(const DexFile* dex_file) { } void DexLayout::LayoutStringData(const DexFile* dex_file) { - const size_t num_strings = header_->GetCollections().StringIds().size(); + const size_t num_strings = header_->StringIds().Size(); std::vector<bool> is_shorty(num_strings, false); std::vector<bool> from_hot_method(num_strings, false); - for (std::unique_ptr<dex_ir::ClassDef>& class_def : header_->GetCollections().ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { // A name of a profile class is probably going to get looked up by ClassTable::Lookup, mark it // as hot. Add its super class and interfaces as well, which can be used during initialization. const bool is_profile_class = @@ -1678,7 +1674,7 @@ void DexLayout::LayoutStringData(const DexFile* dex_file) { } // Sort string data by specified order. std::vector<dex_ir::StringId*> string_ids; - for (auto& string_id : header_->GetCollections().StringIds()) { + for (auto& string_id : header_->StringIds()) { string_ids.push_back(string_id.get()); } std::sort(string_ids.begin(), @@ -1699,8 +1695,7 @@ void DexLayout::LayoutStringData(const DexFile* dex_file) { // Order by index by default. return a->GetIndex() < b->GetIndex(); }); - dex_ir::CollectionVector<dex_ir::StringData>::Vector& string_datas = - header_->GetCollections().StringDatas(); + auto& string_datas = header_->StringDatas(); // Now we know what order we want the string data, reorder them. size_t data_index = 0; for (dex_ir::StringId* string_id : string_ids) { @@ -1713,11 +1708,11 @@ void DexLayout::LayoutStringData(const DexFile* dex_file) { for (const std::unique_ptr<dex_ir::StringData>& data : string_datas) { visited.insert(data.get()); } - for (auto& string_id : header_->GetCollections().StringIds()) { + for (auto& string_id : header_->StringIds()) { CHECK(visited.find(string_id->DataItem()) != visited.end()); } } - CHECK_EQ(data_index, string_datas.size()); + CHECK_EQ(data_index, string_datas.Size()); } // Orders code items according to specified class data ordering. @@ -1732,7 +1727,7 @@ void DexLayout::LayoutCodeItems(const DexFile* dex_file) { // Assign hotness flags to all code items. 
for (InvokeType invoke_type : invoke_types) { - for (std::unique_ptr<dex_ir::ClassDef>& class_def : header_->GetCollections().ClassDefs()) { + for (auto& class_def : header_->ClassDefs()) { const bool is_profile_class = info_->ContainsClass(*dex_file, dex::TypeIndex(class_def->ClassType()->GetIndex())); @@ -1778,8 +1773,7 @@ void DexLayout::LayoutCodeItems(const DexFile* dex_file) { } } - dex_ir::CollectionVector<dex_ir::CodeItem>::Vector& code_items = - header_->GetCollections().CodeItems(); + const auto& code_items = header_->CodeItems(); if (VLOG_IS_ON(dex)) { size_t layout_count[static_cast<size_t>(LayoutType::kLayoutTypeCount)] = {}; for (const std::unique_ptr<dex_ir::CodeItem>& code_item : code_items) { @@ -1871,7 +1865,7 @@ bool DexLayout::ProcessDexFile(const char* file_name, const bool has_output_container = dex_container != nullptr; const bool output = options_.output_dex_directory_ != nullptr || has_output_container; - // Try to avoid eagerly assigning offsets to find bugs since GetOffset will abort if the offset + // Try to avoid eagerly assigning offsets to find bugs since Offset will abort if the offset // is unassigned. bool eagerly_assign_offsets = false; if (options_.visualize_pattern_ || options_.show_section_statistics_ || options_.dump_) { diff --git a/dexlayout/dexlayout_test.cc b/dexlayout/dexlayout_test.cc index f148b94f3d..2b1352db16 100644 --- a/dexlayout/dexlayout_test.cc +++ b/dexlayout/dexlayout_test.cc @@ -468,7 +468,7 @@ class DexLayoutTest : public CommonRuntimeTest { } std::vector<std::string> test_files = { dex_file, profile_file, output_dex, second_output_dex }; - for (auto test_file : test_files) { + for (const std::string& test_file : test_files) { if (!UnlinkFile(test_file)) { return false; } @@ -501,7 +501,7 @@ class DexLayoutTest : public CommonRuntimeTest { } std::vector<std::string> dex_files = { input_dex, output_dex }; - for (auto dex_file : dex_files) { + for (const std::string& dex_file : dex_files) { if (!UnlinkFile(dex_file)) { return false; } diff --git a/imgdiag/imgdiag.cc b/imgdiag/imgdiag.cc index ddb8fe1302..fccc326a9d 100644 --- a/imgdiag/imgdiag.cc +++ b/imgdiag/imgdiag.cc @@ -335,7 +335,7 @@ class ImgObjectVisitor : public ObjectVisitor { using ComputeDirtyFunc = std::function<void(mirror::Object* object, const uint8_t* begin_image_ptr, const std::set<size_t>& dirty_pages)>; - ImgObjectVisitor(ComputeDirtyFunc dirty_func, + ImgObjectVisitor(const ComputeDirtyFunc& dirty_func, const uint8_t* begin_image_ptr, const std::set<size_t>& dirty_pages) : dirty_func_(dirty_func), @@ -356,7 +356,7 @@ class ImgObjectVisitor : public ObjectVisitor { } private: - ComputeDirtyFunc dirty_func_; + const ComputeDirtyFunc& dirty_func_; const uint8_t* begin_image_ptr_; const std::set<size_t>& dirty_pages_; }; @@ -646,7 +646,7 @@ class ImgArtMethodVisitor : public ArtMethodVisitor { using ComputeDirtyFunc = std::function<void(ArtMethod*, const uint8_t*, const std::set<size_t>&)>; - ImgArtMethodVisitor(ComputeDirtyFunc dirty_func, + ImgArtMethodVisitor(const ComputeDirtyFunc& dirty_func, const uint8_t* begin_image_ptr, const std::set<size_t>& dirty_pages) : dirty_func_(dirty_func), @@ -658,7 +658,7 @@ class ImgArtMethodVisitor : public ArtMethodVisitor { } private: - ComputeDirtyFunc dirty_func_; + const ComputeDirtyFunc& dirty_func_; const uint8_t* begin_image_ptr_; const std::set<size_t>& dirty_pages_; }; diff --git a/libartbase/base/atomic.h b/libartbase/base/atomic.h index b68f867bfa..9de84cdd20 100644 --- a/libartbase/base/atomic.h +++ 
b/libartbase/base/atomic.h @@ -28,6 +28,11 @@ namespace art { +enum class CASMode { + kStrong, + kWeak, +}; + template<typename T> class PACKED(sizeof(T)) Atomic : public std::atomic<T> { public: @@ -100,6 +105,15 @@ class PACKED(sizeof(T)) Atomic : public std::atomic<T> { return this->compare_exchange_weak(expected_value, desired_value, std::memory_order_release); } + bool CompareAndSet(T expected_value, + T desired_value, + CASMode mode, + std::memory_order memory_order) { + return mode == CASMode::kStrong + ? this->compare_exchange_strong(expected_value, desired_value, memory_order) + : this->compare_exchange_weak(expected_value, desired_value, memory_order); + } + // Returns the address of the current atomic variable. This is only used by futex() which is // declared to take a volatile address (see base/mutex-inl.h). volatile T* Address() { diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h index 2cc1a31ade..053bf1f783 100644 --- a/libartbase/base/bit_table.h +++ b/libartbase/base/bit_table.h @@ -64,58 +64,17 @@ ALWAYS_INLINE static inline void EncodeVarintBits(Vector* out, size_t* bit_offse } } +// Generic purpose table of uint32_t values, which are tightly packed at bit level. +// It has its own header with the number of rows and the bit-widths of all columns. +// The values are accessible by (row, column). The value -1 is stored efficiently. template<uint32_t kNumColumns> -class BitTable { +class BitTableBase { public: - class Accessor { - public: - static constexpr uint32_t kCount = kNumColumns; - static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max(); - - Accessor() {} - Accessor(const BitTable* table, uint32_t row) : table_(table), row_(row) {} - - ALWAYS_INLINE uint32_t Row() const { return row_; } - - ALWAYS_INLINE bool IsValid() const { return table_ != nullptr && row_ < table_->NumRows(); } - - template<uint32_t Column> - ALWAYS_INLINE uint32_t Get() const { - static_assert(Column < kNumColumns, "Column out of bounds"); - return table_->Get(row_, Column); - } + static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max(); // == -1. + static constexpr uint32_t kValueBias = kNoValue; // Bias so that -1 is encoded as 0. - ALWAYS_INLINE bool Equals(const Accessor& other) { - return this->table_ == other.table_ && this->row_ == other.row_; - } - -// Helper macro to create constructors and per-table utilities in derived class. -#define BIT_TABLE_HEADER() \ - using BitTable<kCount>::Accessor::Accessor; /* inherit the constructors */ \ - template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName; \ - -// Helper macro to create named column accessors in derived class. 
-#define BIT_TABLE_COLUMN(COLUMN, NAME) \ - static constexpr uint32_t k##NAME = COLUMN; \ - ALWAYS_INLINE uint32_t Get##NAME() const { \ - return table_->Get(row_, COLUMN); \ - } \ - ALWAYS_INLINE bool Has##NAME() const { \ - return table_->Get(row_, COLUMN) != kNoValue; \ - } \ - template<int UNUSED> struct ColumnName<COLUMN, UNUSED> { \ - static constexpr const char* Value = #NAME; \ - }; \ - - protected: - const BitTable* table_ = nullptr; - uint32_t row_ = -1; - }; - - static constexpr uint32_t kValueBias = -1; - - BitTable() {} - BitTable(void* data, size_t size, size_t* bit_offset = 0) { + BitTableBase() {} + BitTableBase(void* data, size_t size, size_t* bit_offset) { Decode(BitMemoryRegion(MemoryRegion(data, size)), bit_offset); } @@ -162,6 +121,7 @@ class BitTable { } size_t HeaderBitSize() const { return header_bit_size_; } + size_t BitSize() const { return header_bit_size_ + table_data_.size_in_bits(); } protected: @@ -172,6 +132,45 @@ class BitTable { uint16_t header_bit_size_ = 0; }; +// Helper class which can be used to create BitTable accessors with named getters. +template<uint32_t NumColumns> +class BitTableAccessor { + public: + static constexpr uint32_t kNumColumns = NumColumns; + static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue; + + BitTableAccessor() {} + BitTableAccessor(const BitTableBase<kNumColumns>* table, uint32_t row) + : table_(table), row_(row) { + } + + ALWAYS_INLINE uint32_t Row() const { return row_; } + + ALWAYS_INLINE bool IsValid() const { return table_ != nullptr && row_ < table_->NumRows(); } + + ALWAYS_INLINE bool Equals(const BitTableAccessor& other) { + return this->table_ == other.table_ && this->row_ == other.row_; + } + +// Helper macro to create constructors and per-table utilities in derived class. +#define BIT_TABLE_HEADER() \ + using BitTableAccessor<kNumColumns>::BitTableAccessor; /* inherit constructors */ \ + template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName; \ + +// Helper macro to create named column accessors in derived class. +#define BIT_TABLE_COLUMN(COLUMN, NAME) \ + static constexpr uint32_t k##NAME = COLUMN; \ + ALWAYS_INLINE uint32_t Get##NAME() const { return table_->Get(row_, COLUMN); } \ + ALWAYS_INLINE bool Has##NAME() const { return Get##NAME() != kNoValue; } \ + template<int UNUSED> struct ColumnName<COLUMN, UNUSED> { \ + static constexpr const char* Value = #NAME; \ + }; \ + + protected: + const BitTableBase<kNumColumns>* table_ = nullptr; + uint32_t row_ = -1; +}; + // Template meta-programming helper. template<typename Accessor, size_t... Columns> static const char* const* GetBitTableColumnNamesImpl(std::index_sequence<Columns...>) { @@ -179,19 +178,34 @@ static const char* const* GetBitTableColumnNamesImpl(std::index_sequence<Columns return names; } +// Returns the names of all columns in the given accessor. template<typename Accessor> static const char* const* GetBitTableColumnNames() { - return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kCount>()); + return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kNumColumns>()); } +// Wrapper which makes it easier to use named accessors for the individual rows. +template<typename Accessor> +class BitTable : public BitTableBase<Accessor::kNumColumns> { + public: + using BitTableBase<Accessor::kNumColumns>::BitTableBase; // Constructors. + + ALWAYS_INLINE Accessor GetRow(uint32_t row) const { + return Accessor(this, row); + } +}; + // Helper class for encoding BitTable. 
It can optionally de-duplicate the inputs. template<uint32_t kNumColumns> -class BitTableBuilder { +class BitTableBuilderBase { public: + static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue; + static constexpr uint32_t kValueBias = BitTableBase<kNumColumns>::kValueBias; + class Entry { public: Entry() { - std::fill_n(data_, kNumColumns, BitTable<kNumColumns>::Accessor::kNoValue); + std::fill_n(data_, kNumColumns, kNoValue); } Entry(std::initializer_list<uint32_t> values) { @@ -213,7 +227,7 @@ class BitTableBuilder { uint32_t data_[kNumColumns]; }; - explicit BitTableBuilder(ScopedArenaAllocator* allocator) + explicit BitTableBuilderBase(ScopedArenaAllocator* allocator) : rows_(allocator->Adapter(kArenaAllocBitTableBuilder)), dedup_(8, allocator->Adapter(kArenaAllocBitTableBuilder)) { } @@ -266,7 +280,7 @@ class BitTableBuilder { std::fill_n(max_column_value, kNumColumns, 0); for (uint32_t r = 0; r < size(); r++) { for (uint32_t c = 0; c < kNumColumns; c++) { - max_column_value[c] |= rows_[r][c] - BitTable<kNumColumns>::kValueBias; + max_column_value[c] |= rows_[r][c] - kValueBias; } } for (uint32_t c = 0; c < kNumColumns; c++) { @@ -277,7 +291,6 @@ class BitTableBuilder { // Encode the stored data into a BitTable. template<typename Vector> void Encode(Vector* out, size_t* bit_offset) const { - constexpr uint32_t bias = BitTable<kNumColumns>::kValueBias; size_t initial_bit_offset = *bit_offset; std::array<uint32_t, kNumColumns> column_bits; @@ -295,14 +308,14 @@ class BitTableBuilder { BitMemoryRegion region(MemoryRegion(out->data(), out->size())); for (uint32_t r = 0; r < size(); r++) { for (uint32_t c = 0; c < kNumColumns; c++) { - region.StoreBitsAndAdvance(bit_offset, rows_[r][c] - bias, column_bits[c]); + region.StoreBitsAndAdvance(bit_offset, rows_[r][c] - kValueBias, column_bits[c]); } } } // Verify the written data. if (kIsDebugBuild) { - BitTable<kNumColumns> table; + BitTableBase<kNumColumns> table; BitMemoryRegion region(MemoryRegion(out->data(), out->size())); table.Decode(region, &initial_bit_offset); DCHECK_EQ(size(), table.NumRows()); @@ -322,6 +335,12 @@ class BitTableBuilder { ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_; // Hash -> row index. }; +template<typename Accessor> +class BitTableBuilder : public BitTableBuilderBase<Accessor::kNumColumns> { + public: + using BitTableBuilderBase<Accessor::kNumColumns>::BitTableBuilderBase; // Constructors. +}; + // Helper class for encoding single-column BitTable of bitmaps (allows more than 32 bits). class BitmapTableBuilder { public: @@ -384,7 +403,7 @@ class BitmapTableBuilder { // Verify the written data. 
if (kIsDebugBuild) { - BitTable<1> table; + BitTableBase<1> table; BitMemoryRegion region(MemoryRegion(out->data(), out->size())); table.Decode(region, &initial_bit_offset); DCHECK_EQ(size(), table.NumRows()); diff --git a/libartbase/base/bit_table_test.cc b/libartbase/base/bit_table_test.cc index 969940fe39..ee7cb3a14e 100644 --- a/libartbase/base/bit_table_test.cc +++ b/libartbase/base/bit_table_test.cc @@ -50,11 +50,11 @@ TEST(BitTableTest, TestEmptyTable) { std::vector<uint8_t> buffer; size_t encode_bit_offset = 0; - BitTableBuilder<1> builder(&allocator); + BitTableBuilderBase<1> builder(&allocator); builder.Encode(&buffer, &encode_bit_offset); size_t decode_bit_offset = 0; - BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset); + BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset); EXPECT_EQ(encode_bit_offset, decode_bit_offset); EXPECT_EQ(0u, table.NumRows()); } @@ -67,7 +67,7 @@ TEST(BitTableTest, TestSingleColumnTable) { constexpr uint32_t kNoValue = -1; std::vector<uint8_t> buffer; size_t encode_bit_offset = 0; - BitTableBuilder<1> builder(&allocator); + BitTableBuilderBase<1> builder(&allocator); builder.Add({42u}); builder.Add({kNoValue}); builder.Add({1000u}); @@ -75,7 +75,7 @@ TEST(BitTableTest, TestSingleColumnTable) { builder.Encode(&buffer, &encode_bit_offset); size_t decode_bit_offset = 0; - BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset); + BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset); EXPECT_EQ(encode_bit_offset, decode_bit_offset); EXPECT_EQ(4u, table.NumRows()); EXPECT_EQ(42u, table.Get(0)); @@ -93,12 +93,12 @@ TEST(BitTableTest, TestUnalignedTable) { for (size_t start_bit_offset = 0; start_bit_offset <= 32; start_bit_offset++) { std::vector<uint8_t> buffer; size_t encode_bit_offset = start_bit_offset; - BitTableBuilder<1> builder(&allocator); + BitTableBuilderBase<1> builder(&allocator); builder.Add({42u}); builder.Encode(&buffer, &encode_bit_offset); size_t decode_bit_offset = start_bit_offset; - BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset); + BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset); EXPECT_EQ(encode_bit_offset, decode_bit_offset) << " start_bit_offset=" << start_bit_offset; EXPECT_EQ(1u, table.NumRows()); EXPECT_EQ(42u, table.Get(0)); @@ -113,13 +113,13 @@ TEST(BitTableTest, TestBigTable) { constexpr uint32_t kNoValue = -1; std::vector<uint8_t> buffer; size_t encode_bit_offset = 0; - BitTableBuilder<4> builder(&allocator); + BitTableBuilderBase<4> builder(&allocator); builder.Add({42u, kNoValue, 0u, static_cast<uint32_t>(-2)}); builder.Add({62u, kNoValue, 63u, static_cast<uint32_t>(-3)}); builder.Encode(&buffer, &encode_bit_offset); size_t decode_bit_offset = 0; - BitTable<4> table(buffer.data(), buffer.size(), &decode_bit_offset); + BitTableBase<4> table(buffer.data(), buffer.size(), &decode_bit_offset); EXPECT_EQ(encode_bit_offset, decode_bit_offset); EXPECT_EQ(2u, table.NumRows()); EXPECT_EQ(42u, table.Get(0, 0)); @@ -141,9 +141,9 @@ TEST(BitTableTest, TestDedup) { ArenaStack arena_stack(&pool); ScopedArenaAllocator allocator(&arena_stack); - BitTableBuilder<2> builder(&allocator); - BitTableBuilder<2>::Entry value0{1, 2}; - BitTableBuilder<2>::Entry value1{3, 4}; + BitTableBuilderBase<2> builder(&allocator); + BitTableBuilderBase<2>::Entry value0{1, 2}; + BitTableBuilderBase<2>::Entry value1{3, 4}; EXPECT_EQ(0u, builder.Dedup(&value0)); EXPECT_EQ(1u, builder.Dedup(&value1)); EXPECT_EQ(0u, builder.Dedup(&value0)); @@ -169,7 
+169,7 @@ TEST(BitTableTest, TestBitmapTable) { EXPECT_EQ(1 + static_cast<uint32_t>(POPCOUNT(value)), builder.size()); size_t decode_bit_offset = 0; - BitTable<1> table(buffer.data(), buffer.size(), &decode_bit_offset); + BitTableBase<1> table(buffer.data(), buffer.size(), &decode_bit_offset); EXPECT_EQ(encode_bit_offset, decode_bit_offset); for (auto it : indicies) { uint64_t expected = it.first; @@ -187,10 +187,10 @@ TEST(BitTableTest, TestCollisions) { ScopedArenaAllocator allocator(&arena_stack); FNVHash<MemoryRegion> hasher; - BitTableBuilder<2>::Entry value0{56948505, 0}; - BitTableBuilder<2>::Entry value1{67108869, 0}; + BitTableBuilderBase<2>::Entry value0{56948505, 0}; + BitTableBuilderBase<2>::Entry value1{67108869, 0}; - BitTableBuilder<2> builder(&allocator); + BitTableBuilderBase<2> builder(&allocator); EXPECT_EQ(hasher(MemoryRegion(&value0, sizeof(value0))), hasher(MemoryRegion(&value1, sizeof(value1)))); EXPECT_EQ(0u, builder.Dedup(&value0)); diff --git a/libartbase/base/common_art_test.h b/libartbase/base/common_art_test.h index d9bea3d97a..0ace09de1a 100644 --- a/libartbase/base/common_art_test.h +++ b/libartbase/base/common_art_test.h @@ -222,4 +222,10 @@ using CommonArtTestWithParam = CommonArtTestBase<testing::TestWithParam<Param>>; } } // namespace art +#define TEST_DISABLED_FOR_MEMORY_TOOL_WITH_HEAP_POISONING() \ + if (kRunningOnMemoryTool && kPoisonHeapReferences) { \ + printf("WARNING: TEST DISABLED FOR MEMORY TOOL WITH HEAP POISONING\n"); \ + return; \ + } + #endif // ART_LIBARTBASE_BASE_COMMON_ART_TEST_H_ diff --git a/libartbase/base/file_utils_test.cc b/libartbase/base/file_utils_test.cc index 56d1c44fc0..2a7273b85e 100644 --- a/libartbase/base/file_utils_test.cc +++ b/libartbase/base/file_utils_test.cc @@ -69,12 +69,11 @@ TEST_F(FileUtilsTest, GetAndroidRootSafe) { EXPECT_EQ(android_root, android_root_env); // Set ANDROID_ROOT to something else (but the directory must exist). So use dirname. - char* root_dup = strdup(android_root_env.c_str()); - char* dir = dirname(root_dup); + UniqueCPtr<char> root_dup(strdup(android_root_env.c_str())); + char* dir = dirname(root_dup.get()); ASSERT_EQ(0, setenv("ANDROID_ROOT", dir, 1 /* overwrite */)); std::string android_root2 = GetAndroidRootSafe(&error_msg); EXPECT_STREQ(dir, android_root2.c_str()); - free(root_dup); // Set a bogus value for ANDROID_ROOT. This should be an error. 
ASSERT_EQ(0, setenv("ANDROID_ROOT", "/this/is/obviously/bogus", 1 /* overwrite */)); diff --git a/libartbase/base/variant_map_test.cc b/libartbase/base/variant_map_test.cc index 4677b6d3b3..f2da3389b1 100644 --- a/libartbase/base/variant_map_test.cc +++ b/libartbase/base/variant_map_test.cc @@ -108,7 +108,7 @@ TEST(VariantMaps, RuleOfFive) { EXPECT_EQ(size_t(2), fmFilled.Size()); // Test copy constructor - FruitMap fmEmptyCopy(fmEmpty); + FruitMap fmEmptyCopy(fmEmpty); // NOLINT EXPECT_EQ(size_t(0), fmEmptyCopy.Size()); // Test copy constructor diff --git a/libdexfile/dex/dex_instruction_test.cc b/libdexfile/dex/dex_instruction_test.cc index c944085b9e..6ce9dbafc8 100644 --- a/libdexfile/dex/dex_instruction_test.cc +++ b/libdexfile/dex/dex_instruction_test.cc @@ -135,7 +135,7 @@ TEST(Instruction, PropertiesOf4rcc) { static void Build35c(uint16_t* out, Instruction::Code code, uint16_t method_idx, - std::vector<uint16_t> args) { + const std::vector<uint16_t>& args) { out[0] = 0; out[0] |= (args.size() << 12); out[0] |= static_cast<uint16_t>(code); @@ -152,7 +152,7 @@ static void Build35c(uint16_t* out, static std::string DumpInst35c(Instruction::Code code, uint16_t method_idx, - std::vector<uint16_t> args) { + const std::vector<uint16_t>& args) { uint16_t inst[6] = {}; Build35c(inst, code, method_idx, args); return Instruction::At(inst)->DumpString(nullptr); diff --git a/libprofile/profile/profile_compilation_info.cc b/libprofile/profile/profile_compilation_info.cc index 748e24e27c..1bb84b18dc 100644 --- a/libprofile/profile/profile_compilation_info.cc +++ b/libprofile/profile/profile_compilation_info.cc @@ -1383,7 +1383,7 @@ bool ProfileCompilationInfo::RemapProfileIndex( // the current profile info. // Note that the number of elements should be very small, so this should not // be a performance issue. - for (const ProfileLineHeader other_profile_line_header : profile_line_headers) { + for (const ProfileLineHeader& other_profile_line_header : profile_line_headers) { if (!filter_fn(other_profile_line_header.dex_location, other_profile_line_header.checksum)) { continue; } diff --git a/profman/profman.cc b/profman/profman.cc index 5fbce66412..9b470973c6 100644 --- a/profman/profman.cc +++ b/profman/profman.cc @@ -389,7 +389,7 @@ class ProfMan FINAL { } bool OpenApkFilesFromLocations( - std::function<void(std::unique_ptr<const DexFile>&&)> process_fn) { + const std::function<void(std::unique_ptr<const DexFile>&&)>& process_fn) { bool use_apk_fd_list = !apks_fd_.empty(); if (use_apk_fd_list) { // Get the APKs from the collection of FDs. 
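Several of the hunks above (ImgObjectVisitor and ImgArtMethodVisitor in imgdiag.cc, Build35c/DumpInst35c in dex_instruction_test.cc, RemapProfileIndex in profile_compilation_info.cc, and OpenApkFilesFromLocations in profman.cc) change callback and container parameters from pass-by-value to pass-by-const-reference, so a std::function or std::vector is no longer copied on every call. A minimal sketch of the pattern; the names (ForEachValue, process_fn) are illustrative and not taken from the ART sources:

#include <cstdio>
#include <functional>
#include <vector>

// Taking the callable by const reference avoids copying its (possibly heap-allocated)
// target state each time the helper is invoked; the callee only needs to call it.
static void ForEachValue(const std::vector<int>& values,
                         const std::function<void(int)>& process_fn) {
  for (int value : values) {
    process_fn(value);
  }
}

int main() {
  const std::vector<int> values = {1, 2, 3};
  ForEachValue(values, [](int v) { std::printf("%d\n", v); });
  return 0;
}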
diff --git a/runtime/Android.bp b/runtime/Android.bp index 1168798f38..b347019be2 100644 --- a/runtime/Android.bp +++ b/runtime/Android.bp @@ -37,7 +37,6 @@ libart_cc_defaults { "base/quasi_atomic.cc", "base/timing_logger.cc", "cha.cc", - "check_jni.cc", "class_linker.cc", "class_loader_context.cc", "class_root.cc", @@ -112,6 +111,7 @@ libart_cc_defaults { "jit/jit_code_cache.cc", "jit/profiling_info.cc", "jit/profile_saver.cc", + "jni/check_jni.cc", "jni/java_vm_ext.cc", "jni/jni_env_ext.cc", "jni/jni_internal.cc", diff --git a/runtime/class_loader_utils.h b/runtime/class_loader_utils.h index af42878e97..78ad568d25 100644 --- a/runtime/class_loader_utils.h +++ b/runtime/class_loader_utils.h @@ -147,8 +147,14 @@ inline void VisitClassLoaderDexFiles(ScopedObjectAccessAlreadyRunnable& soa, Handle<mirror::ClassLoader> class_loader, Visitor fn) REQUIRES_SHARED(Locks::mutator_lock_) { - auto helper = [&fn](const art::DexFile* dex_file, void** ATTRIBUTE_UNUSED) + auto helper = [&fn](const art::DexFile* dex_file, void** ret) REQUIRES_SHARED(Locks::mutator_lock_) { +#ifdef __clang_analyzer__ + *ret = nullptr; +#else + UNUSED(ret); +#endif + return fn(dex_file); }; VisitClassLoaderDexFiles<decltype(helper), void*>(soa, diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc index c4d2fdda0b..cbd98800f4 100644 --- a/runtime/gc/collector/concurrent_copying.cc +++ b/runtime/gc/collector/concurrent_copying.cc @@ -1551,7 +1551,7 @@ inline void ConcurrentCopying::ProcessMarkStackRef(mirror::Object* to_ref) { // above IsInToSpace() evaluates to true and we change the color from gray to white here in this // else block. if (kUseBakerReadBarrier) { - bool success = to_ref->AtomicSetReadBarrierState</*kCasRelease*/true>( + bool success = to_ref->AtomicSetReadBarrierState<std::memory_order_release>( ReadBarrier::GrayState(), ReadBarrier::WhiteState()); DCHECK(success) << "Must succeed as we won the race."; @@ -1822,7 +1822,7 @@ void ConcurrentCopying::ReclaimPhase() { std::string ConcurrentCopying::DumpReferenceInfo(mirror::Object* ref, const char* ref_name, - std::string indent) { + const char* indent) { std::ostringstream oss; oss << indent << heap_->GetVerification()->DumpObjectInfo(ref, ref_name) << '\n'; if (ref != nullptr) { @@ -1846,13 +1846,13 @@ std::string ConcurrentCopying::DumpHeapReference(mirror::Object* obj, MemberOffset offset, mirror::Object* ref) { std::ostringstream oss; - std::string indent = " "; - oss << indent << "Invalid reference: ref=" << ref + constexpr const char* kIndent = " "; + oss << kIndent << "Invalid reference: ref=" << ref << " referenced from: object=" << obj << " offset= " << offset << '\n'; // Information about `obj`. - oss << DumpReferenceInfo(obj, "obj", indent) << '\n'; + oss << DumpReferenceInfo(obj, "obj", kIndent) << '\n'; // Information about `ref`. - oss << DumpReferenceInfo(ref, "ref", indent); + oss << DumpReferenceInfo(ref, "ref", kIndent); return oss.str(); } @@ -1928,10 +1928,10 @@ class RootPrinter { std::string ConcurrentCopying::DumpGcRoot(mirror::Object* ref) { std::ostringstream oss; - std::string indent = " "; - oss << indent << "Invalid GC root: ref=" << ref << '\n'; + constexpr const char* kIndent = " "; + oss << kIndent << "Invalid GC root: ref=" << ref << '\n'; // Information about `ref`. 
- oss << DumpReferenceInfo(ref, "ref", indent); + oss << DumpReferenceInfo(ref, "ref", kIndent); return oss.str(); } @@ -2490,7 +2490,10 @@ mirror::Object* ConcurrentCopying::Copy(Thread* const self, LockWord new_lock_word = LockWord::FromForwardingAddress(reinterpret_cast<size_t>(to_ref)); // Try to atomically write the fwd ptr. - bool success = from_ref->CasLockWordWeakRelaxed(old_lock_word, new_lock_word); + bool success = from_ref->CasLockWord(old_lock_word, + new_lock_word, + CASMode::kWeak, + std::memory_order_relaxed); if (LIKELY(success)) { // The CAS succeeded. DCHECK(thread_running_gc_ != nullptr); diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h index f1e7e2fd23..448525d013 100644 --- a/runtime/gc/collector/concurrent_copying.h +++ b/runtime/gc/collector/concurrent_copying.h @@ -242,7 +242,7 @@ class ConcurrentCopying : public GarbageCollector { REQUIRES_SHARED(Locks::mutator_lock_); // Dump information about reference `ref` and return it as a string. // Use `ref_name` to name the reference in messages. Each message is prefixed with `indent`. - std::string DumpReferenceInfo(mirror::Object* ref, const char* ref_name, std::string indent = "") + std::string DumpReferenceInfo(mirror::Object* ref, const char* ref_name, const char* indent = "") REQUIRES_SHARED(Locks::mutator_lock_); // Dump information about heap reference `ref`, referenced from object `obj` at offset `offset`, // and return it as a string. diff --git a/runtime/gc/heap_verification_test.cc b/runtime/gc/heap_verification_test.cc index 38695332bb..6caca84854 100644 --- a/runtime/gc/heap_verification_test.cc +++ b/runtime/gc/heap_verification_test.cc @@ -83,7 +83,7 @@ TEST_F(VerificationTest, IsValidClassOrNotInHeap) { } TEST_F(VerificationTest, IsValidClassInHeap) { - TEST_DISABLED_FOR_MEMORY_TOOL(); + TEST_DISABLED_FOR_MEMORY_TOOL_WITH_HEAP_POISONING(); ScopedObjectAccess soa(Thread::Current()); VariableSizedHandleScope hs(soa.Self()); Handle<mirror::String> string( @@ -106,7 +106,7 @@ TEST_F(VerificationTest, DumpInvalidObjectInfo) { } TEST_F(VerificationTest, DumpValidObjectInfo) { - TEST_DISABLED_FOR_MEMORY_TOOL(); + TEST_DISABLED_FOR_MEMORY_TOOL_WITH_HEAP_POISONING(); ScopedLogSeverity sls(LogSeverity::INFO); ScopedObjectAccess soa(Thread::Current()); Runtime* const runtime = Runtime::Current(); @@ -126,7 +126,7 @@ TEST_F(VerificationTest, DumpValidObjectInfo) { } TEST_F(VerificationTest, LogHeapCorruption) { - TEST_DISABLED_FOR_MEMORY_TOOL(); + TEST_DISABLED_FOR_MEMORY_TOOL_WITH_HEAP_POISONING(); ScopedLogSeverity sls(LogSeverity::INFO); ScopedObjectAccess soa(Thread::Current()); Runtime* const runtime = Runtime::Current(); @@ -147,7 +147,6 @@ TEST_F(VerificationTest, LogHeapCorruption) { } TEST_F(VerificationTest, FindPathFromRootSet) { - TEST_DISABLED_FOR_MEMORY_TOOL(); ScopedLogSeverity sls(LogSeverity::INFO); ScopedObjectAccess soa(Thread::Current()); Runtime* const runtime = Runtime::Current(); diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc index 667bd03c18..6a4cf56e2a 100644 --- a/runtime/interpreter/unstarted_runtime.cc +++ b/runtime/interpreter/unstarted_runtime.cc @@ -1855,11 +1855,17 @@ void UnstartedRuntime::UnstartedJNIUnsafeCompareAndSwapInt( jint newValue = args[4]; bool success; if (Runtime::Current()->IsActiveTransaction()) { - success = obj->CasFieldStrongSequentiallyConsistent32<true>(MemberOffset(offset), - expectedValue, newValue); + success = obj->CasField32<true>(MemberOffset(offset), + 
expectedValue, + newValue, + CASMode::kStrong, + std::memory_order_seq_cst); } else { - success = obj->CasFieldStrongSequentiallyConsistent32<false>(MemberOffset(offset), - expectedValue, newValue); + success = obj->CasField32<false>(MemberOffset(offset), + expectedValue, + newValue, + CASMode::kStrong, + std::memory_order_seq_cst); } result->SetZ(success ? JNI_TRUE : JNI_FALSE); } diff --git a/runtime/interpreter/unstarted_runtime_test.cc b/runtime/interpreter/unstarted_runtime_test.cc index 01e74962ba..200fc5b334 100644 --- a/runtime/interpreter/unstarted_runtime_test.cc +++ b/runtime/interpreter/unstarted_runtime_test.cc @@ -43,6 +43,18 @@ namespace art { namespace interpreter { +// Deleter to be used with ShadowFrame::CreateDeoptimizedFrame objects. +struct DeoptShadowFrameDelete { + // NOTE: Deleting a const object is valid but free() takes a non-const pointer. + void operator()(ShadowFrame* ptr) const { + if (ptr != nullptr) { + ShadowFrame::DeleteDeoptimizedFrame(ptr); + } + } +}; +// Alias for std::unique_ptr<> that uses the above deleter. +using UniqueDeoptShadowFramePtr = std::unique_ptr<ShadowFrame, DeoptShadowFrameDelete>; + class UnstartedRuntimeTest : public CommonRuntimeTest { protected: // Re-expose all UnstartedRuntime implementations so we don't need to declare a million @@ -79,6 +91,14 @@ class UnstartedRuntimeTest : public CommonRuntimeTest { #undef UNSTARTED_RUNTIME_JNI_LIST #undef UNSTARTED_JNI + UniqueDeoptShadowFramePtr CreateShadowFrame(uint32_t num_vregs, + ShadowFrame* link, + ArtMethod* method, + uint32_t dex_pc) { + return UniqueDeoptShadowFramePtr( + ShadowFrame::CreateDeoptimizedFrame(num_vregs, link, method, dex_pc)); + } + // Helpers for ArrayCopy. // // Note: as we have to use handles, we use StackHandleScope to transfer data. 
Hardcode a size @@ -213,17 +233,15 @@ TEST_F(UnstartedRuntimeTest, MemoryPeekByte) { const uint8_t* base_ptr = base_array; JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); for (int32_t i = 0; i < kBaseLen; ++i) { tmp->SetVRegLong(0, static_cast<int64_t>(reinterpret_cast<intptr_t>(base_ptr + i))); - UnstartedMemoryPeekByte(self, tmp, &result, 0); + UnstartedMemoryPeekByte(self, tmp.get(), &result, 0); EXPECT_EQ(result.GetB(), static_cast<int8_t>(base_array[i])); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, MemoryPeekShort) { @@ -235,20 +253,18 @@ TEST_F(UnstartedRuntimeTest, MemoryPeekShort) { const uint8_t* base_ptr = base_array; JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); int32_t adjusted_length = kBaseLen - sizeof(int16_t); for (int32_t i = 0; i < adjusted_length; ++i) { tmp->SetVRegLong(0, static_cast<int64_t>(reinterpret_cast<intptr_t>(base_ptr + i))); - UnstartedMemoryPeekShort(self, tmp, &result, 0); + UnstartedMemoryPeekShort(self, tmp.get(), &result, 0); typedef int16_t unaligned_short __attribute__ ((aligned (1))); const unaligned_short* short_ptr = reinterpret_cast<const unaligned_short*>(base_ptr + i); EXPECT_EQ(result.GetS(), *short_ptr); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, MemoryPeekInt) { @@ -260,20 +276,18 @@ TEST_F(UnstartedRuntimeTest, MemoryPeekInt) { const uint8_t* base_ptr = base_array; JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); int32_t adjusted_length = kBaseLen - sizeof(int32_t); for (int32_t i = 0; i < adjusted_length; ++i) { tmp->SetVRegLong(0, static_cast<int64_t>(reinterpret_cast<intptr_t>(base_ptr + i))); - UnstartedMemoryPeekInt(self, tmp, &result, 0); + UnstartedMemoryPeekInt(self, tmp.get(), &result, 0); typedef int32_t unaligned_int __attribute__ ((aligned (1))); const unaligned_int* int_ptr = reinterpret_cast<const unaligned_int*>(base_ptr + i); EXPECT_EQ(result.GetI(), *int_ptr); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, MemoryPeekLong) { @@ -285,20 +299,18 @@ TEST_F(UnstartedRuntimeTest, MemoryPeekLong) { const uint8_t* base_ptr = base_array; JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); int32_t adjusted_length = kBaseLen - sizeof(int64_t); for (int32_t i = 0; i < adjusted_length; ++i) { tmp->SetVRegLong(0, static_cast<int64_t>(reinterpret_cast<intptr_t>(base_ptr + i))); - UnstartedMemoryPeekLong(self, tmp, &result, 0); + UnstartedMemoryPeekLong(self, tmp.get(), &result, 0); typedef int64_t unaligned_long __attribute__ ((aligned (1))); const unaligned_long* long_ptr = reinterpret_cast<const unaligned_long*>(base_ptr + i); EXPECT_EQ(result.GetJ(), *long_ptr); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, StringGetCharsNoCheck) { @@ -317,7 +329,7 @@ TEST_F(UnstartedRuntimeTest, StringGetCharsNoCheck) { uint16_t buf[kBaseLen]; JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); for 
(int32_t start_index = 0; start_index < kBaseLen; ++start_index) { for (int32_t count = 0; count <= kBaseLen; ++count) { @@ -333,7 +345,7 @@ TEST_F(UnstartedRuntimeTest, StringGetCharsNoCheck) { // Copy the char_array into buf. memcpy(buf, h_char_array->GetData(), kBaseLen * sizeof(uint16_t)); - UnstartedStringCharAt(self, tmp, &result, 0); + UnstartedStringCharAt(self, tmp.get(), &result, 0); uint16_t* data = h_char_array->GetData(); @@ -357,8 +369,6 @@ TEST_F(UnstartedRuntimeTest, StringGetCharsNoCheck) { } } } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, StringCharAt) { @@ -371,18 +381,16 @@ TEST_F(UnstartedRuntimeTest, StringCharAt) { ObjPtr<mirror::String> test_string = mirror::String::AllocFromModifiedUtf8(self, base_string); JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); for (int32_t i = 0; i < base_len; ++i) { tmp->SetVRegReference(0, test_string); tmp->SetVReg(1, i); - UnstartedStringCharAt(self, tmp, &result, 0); + UnstartedStringCharAt(self, tmp.get(), &result, 0); EXPECT_EQ(result.GetI(), base_string[i]); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, StringInit) { @@ -398,7 +406,7 @@ TEST_F(UnstartedRuntimeTest, StringInit) { uint16_t inst_data[3] = { 0x2070, 0x0000, 0x0010 }; JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, method, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, method, 0); const char* base_string = "hello_world"; StackHandleScope<2> hs(self); Handle<mirror::String> string_arg = @@ -433,8 +441,6 @@ TEST_F(UnstartedRuntimeTest, StringInit) { } EXPECT_EQ(equal, true); } - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } // Tests the exceptions that should be checked before modifying the destination. @@ -443,7 +449,7 @@ TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTestExceptions) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); // Note: all tests are not GC safe. Assume there's no GC running here with the few objects we // allocate. 
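The unstarted_runtime_test.cc changes above and below replace manual ShadowFrame::DeleteDeoptimizedFrame() calls with a std::unique_ptr carrying a custom deleter (UniqueDeoptShadowFramePtr), so the frame is released on every exit path, including early ASSERT failures. A self-contained sketch of the same idiom, using a hypothetical Resource type in place of the real ShadowFrame API:

#include <memory>

struct Resource { int payload = 0; };

// Hypothetical create/delete pair standing in for
// ShadowFrame::CreateDeoptimizedFrame / ShadowFrame::DeleteDeoptimizedFrame.
Resource* CreateResource() { return new Resource(); }
void DeleteResource(Resource* r) { delete r; }

// Deleter functor: lets std::unique_ptr call the bespoke free function.
struct ResourceDelete {
  void operator()(Resource* ptr) const {
    if (ptr != nullptr) {
      DeleteResource(ptr);
    }
  }
};
using UniqueResourcePtr = std::unique_ptr<Resource, ResourceDelete>;

int main() {
  UniqueResourcePtr res(CreateResource());  // Freed automatically when 'res' goes out of scope.
  res->payload = 42;
  return res->payload == 42 ? 0 : 1;
}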
@@ -458,26 +464,24 @@ TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTestExceptions) { Handle<mirror::ObjectArray<mirror::Object>> array( hs_misc.NewHandle(CreateObjectArray(self, object_class.Get(), hs_data))); - RunArrayCopy(self, tmp, true, array.Get(), -1, array.Get(), 0, 0); - RunArrayCopy(self, tmp, true, array.Get(), 0, array.Get(), -1, 0); - RunArrayCopy(self, tmp, true, array.Get(), 0, array.Get(), 0, -1); - RunArrayCopy(self, tmp, true, array.Get(), 0, array.Get(), 0, 4); - RunArrayCopy(self, tmp, true, array.Get(), 0, array.Get(), 1, 3); - RunArrayCopy(self, tmp, true, array.Get(), 1, array.Get(), 0, 3); + RunArrayCopy(self, tmp.get(), true, array.Get(), -1, array.Get(), 0, 0); + RunArrayCopy(self, tmp.get(), true, array.Get(), 0, array.Get(), -1, 0); + RunArrayCopy(self, tmp.get(), true, array.Get(), 0, array.Get(), 0, -1); + RunArrayCopy(self, tmp.get(), true, array.Get(), 0, array.Get(), 0, 4); + RunArrayCopy(self, tmp.get(), true, array.Get(), 0, array.Get(), 1, 3); + RunArrayCopy(self, tmp.get(), true, array.Get(), 1, array.Get(), 0, 3); Handle<mirror::ObjectArray<mirror::Object>> class_as_array = hs_misc.NewHandle(reinterpret_cast<mirror::ObjectArray<mirror::Object>*>(object_class.Get())); - RunArrayCopy(self, tmp, true, class_as_array.Get(), 0, array.Get(), 0, 0); - RunArrayCopy(self, tmp, true, array.Get(), 0, class_as_array.Get(), 0, 0); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); + RunArrayCopy(self, tmp.get(), true, class_as_array.Get(), 0, array.Get(), 0, 0); + RunArrayCopy(self, tmp.get(), true, array.Get(), 0, class_as_array.Get(), 0, 0); } TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTest) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); JValue result; - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); StackHandleScope<1> hs_object(self); Handle<mirror::Class> object_class(hs_object.NewHandle(GetClassRoot<mirror::Object>())); @@ -501,7 +505,7 @@ TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTest) { hs_expected.NewHandle(hs_src.GetReference(1)); RunArrayCopy(self, - tmp, + tmp.get(), false, object_class.Get(), object_class.Get(), @@ -532,7 +536,7 @@ TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTest) { hs_expected.NewHandle(hs_dst.GetReference(2)); RunArrayCopy(self, - tmp, + tmp.get(), false, object_class.Get(), GetClassRoot<mirror::String>(), @@ -563,7 +567,7 @@ TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTest) { hs_expected.NewHandle(hs_dst.GetReference(2)); RunArrayCopy(self, - tmp, + tmp.get(), true, object_class.Get(), GetClassRoot<mirror::String>(), @@ -574,15 +578,13 @@ TEST_F(UnstartedRuntimeTest, SystemArrayCopyObjectArrayTest) { 3, hs_expected); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, IntegerParseIntTest) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); // Test string. Should be valid, and between minimal values of LONG_MIN and LONG_MAX (for all // suffixes). 
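Note that the rewritten tests keep handing the raw pointer to the Unstarted* entry points via tmp.get(): the unique_ptr only owns the frame, while the callees merely borrow it. A short sketch of that split between ownership and borrowing, again with placeholder names rather than the real interpreter API:

#include <memory>

struct Frame { int vreg0 = 0; };
struct FrameDelete { void operator()(Frame* f) const { delete f; } };
using UniqueFramePtr = std::unique_ptr<Frame, FrameDelete>;

// Borrowing entry point: takes a raw pointer and does not participate in ownership.
int ReadVReg0(const Frame* frame) { return frame->vreg0; }

int main() {
  UniqueFramePtr frame(new Frame());
  frame->vreg0 = 7;
  // Pass frame.get(); the unique_ptr still frees the object at the end of main().
  return ReadVReg0(frame.get()) == 7 ? 0 : 1;
}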
@@ -616,13 +618,11 @@ TEST_F(UnstartedRuntimeTest, IntegerParseIntTest) { tmp->SetVRegReference(0, h_str.Get()); JValue result; - UnstartedIntegerParseInt(self, tmp, &result, 0); + UnstartedIntegerParseInt(self, tmp.get(), &result, 0); ASSERT_FALSE(self->IsExceptionPending()); EXPECT_EQ(result.GetI(), test_values[i]); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } // Right now the same as Integer.Parse @@ -630,7 +630,7 @@ TEST_F(UnstartedRuntimeTest, LongParseLongTest) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); // Test string. Should be valid, and between minimal values of LONG_MIN and LONG_MAX (for all // suffixes). @@ -664,20 +664,18 @@ TEST_F(UnstartedRuntimeTest, LongParseLongTest) { tmp->SetVRegReference(0, h_str.Get()); JValue result; - UnstartedLongParseLong(self, tmp, &result, 0); + UnstartedLongParseLong(self, tmp.get(), &result, 0); ASSERT_FALSE(self->IsExceptionPending()); EXPECT_EQ(result.GetJ(), test_values[i]); } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, Ceil) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); constexpr double nan = std::numeric_limits<double>::quiet_NaN(); constexpr double inf = std::numeric_limits<double>::infinity(); @@ -697,16 +695,14 @@ TEST_F(UnstartedRuntimeTest, Ceil) { { ld2, ld2 } }; - TestCeilFloor(true /* ceil */, self, tmp, test_pairs, arraysize(test_pairs)); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); + TestCeilFloor(true /* ceil */, self, tmp.get(), test_pairs, arraysize(test_pairs)); } TEST_F(UnstartedRuntimeTest, Floor) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); constexpr double nan = std::numeric_limits<double>::quiet_NaN(); constexpr double inf = std::numeric_limits<double>::infinity(); @@ -726,16 +722,14 @@ TEST_F(UnstartedRuntimeTest, Floor) { { ld2, ld2 } }; - TestCeilFloor(false /* floor */, self, tmp, test_pairs, arraysize(test_pairs)); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); + TestCeilFloor(false /* floor */, self, tmp.get(), test_pairs, arraysize(test_pairs)); } TEST_F(UnstartedRuntimeTest, ToLowerUpper) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); std::locale c_locale("C"); @@ -749,7 +743,7 @@ TEST_F(UnstartedRuntimeTest, ToLowerUpper) { { JValue result; tmp->SetVReg(0, static_cast<int32_t>(i)); - UnstartedCharacterToLowerCase(self, tmp, &result, 0); + UnstartedCharacterToLowerCase(self, tmp.get(), &result, 0); ASSERT_FALSE(self->IsExceptionPending()); uint32_t lower_result = static_cast<uint32_t>(result.GetI()); if (c_lower) { @@ -766,7 +760,7 @@ TEST_F(UnstartedRuntimeTest, ToLowerUpper) { { JValue result2; tmp->SetVReg(0, static_cast<int32_t>(i)); - UnstartedCharacterToUpperCase(self, tmp, &result2, 0); + UnstartedCharacterToUpperCase(self, tmp.get(), &result2, 0); ASSERT_FALSE(self->IsExceptionPending()); uint32_t upper_result = static_cast<uint32_t>(result2.GetI()); if 
(c_upper) { @@ -789,7 +783,7 @@ TEST_F(UnstartedRuntimeTest, ToLowerUpper) { JValue result; tmp->SetVReg(0, static_cast<int32_t>(i)); Runtime::Current()->EnterTransactionMode(); - UnstartedCharacterToLowerCase(self, tmp, &result, 0); + UnstartedCharacterToLowerCase(self, tmp.get(), &result, 0); ASSERT_TRUE(Runtime::Current()->IsTransactionAborted()); Runtime::Current()->ExitTransactionMode(); ASSERT_TRUE(self->IsExceptionPending()); @@ -798,7 +792,7 @@ TEST_F(UnstartedRuntimeTest, ToLowerUpper) { JValue result; tmp->SetVReg(0, static_cast<int32_t>(i)); Runtime::Current()->EnterTransactionMode(); - UnstartedCharacterToUpperCase(self, tmp, &result, 0); + UnstartedCharacterToUpperCase(self, tmp.get(), &result, 0); ASSERT_TRUE(Runtime::Current()->IsTransactionAborted()); Runtime::Current()->ExitTransactionMode(); ASSERT_TRUE(self->IsExceptionPending()); @@ -809,7 +803,7 @@ TEST_F(UnstartedRuntimeTest, ToLowerUpper) { JValue result; tmp->SetVReg(0, static_cast<int32_t>(i)); Runtime::Current()->EnterTransactionMode(); - UnstartedCharacterToLowerCase(self, tmp, &result, 0); + UnstartedCharacterToLowerCase(self, tmp.get(), &result, 0); ASSERT_TRUE(Runtime::Current()->IsTransactionAborted()); Runtime::Current()->ExitTransactionMode(); ASSERT_TRUE(self->IsExceptionPending()); @@ -818,59 +812,53 @@ TEST_F(UnstartedRuntimeTest, ToLowerUpper) { JValue result; tmp->SetVReg(0, static_cast<int32_t>(i)); Runtime::Current()->EnterTransactionMode(); - UnstartedCharacterToUpperCase(self, tmp, &result, 0); + UnstartedCharacterToUpperCase(self, tmp.get(), &result, 0); ASSERT_TRUE(Runtime::Current()->IsTransactionAborted()); Runtime::Current()->ExitTransactionMode(); ASSERT_TRUE(self->IsExceptionPending()); } } - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, Sin) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); // Test an important value, PI/6. That's the one we see in practice. constexpr uint64_t lvalue = UINT64_C(0x3fe0c152382d7365); tmp->SetVRegLong(0, static_cast<int64_t>(lvalue)); JValue result; - UnstartedMathSin(self, tmp, &result, 0); + UnstartedMathSin(self, tmp.get(), &result, 0); const uint64_t lresult = static_cast<uint64_t>(result.GetJ()); EXPECT_EQ(UINT64_C(0x3fdfffffffffffff), lresult); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, Cos) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); // Test an important value, PI/6. That's the one we see in practice. constexpr uint64_t lvalue = UINT64_C(0x3fe0c152382d7365); tmp->SetVRegLong(0, static_cast<int64_t>(lvalue)); JValue result; - UnstartedMathCos(self, tmp, &result, 0); + UnstartedMathCos(self, tmp.get(), &result, 0); const uint64_t lresult = static_cast<uint64_t>(result.GetJ()); EXPECT_EQ(UINT64_C(0x3febb67ae8584cab), lresult); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, Pow) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); // Test an important pair. 
constexpr uint64_t lvalue1 = UINT64_C(0x4079000000000000); @@ -880,12 +868,10 @@ TEST_F(UnstartedRuntimeTest, Pow) { tmp->SetVRegLong(2, static_cast<int64_t>(lvalue2)); JValue result; - UnstartedMathPow(self, tmp, &result, 0); + UnstartedMathPow(self, tmp.get(), &result, 0); const uint64_t lresult = static_cast<uint64_t>(result.GetJ()); EXPECT_EQ(UINT64_C(0x3f8c5c51326aa7ee), lresult); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } TEST_F(UnstartedRuntimeTest, IsAnonymousClass) { @@ -893,11 +879,11 @@ TEST_F(UnstartedRuntimeTest, IsAnonymousClass) { ScopedObjectAccess soa(self); JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); ObjPtr<mirror::Class> class_klass = GetClassRoot<mirror::Class>(); shadow_frame->SetVRegReference(0, class_klass); - UnstartedClassIsAnonymousClass(self, shadow_frame, &result, 0); + UnstartedClassIsAnonymousClass(self, shadow_frame.get(), &result, 0); EXPECT_EQ(result.GetZ(), 0); jobject class_loader = LoadDex("Nested"); @@ -907,10 +893,8 @@ TEST_F(UnstartedRuntimeTest, IsAnonymousClass) { ObjPtr<mirror::Class> c = class_linker_->FindClass(soa.Self(), "LNested$1;", loader); ASSERT_TRUE(c != nullptr); shadow_frame->SetVRegReference(0, c); - UnstartedClassIsAnonymousClass(self, shadow_frame, &result, 0); + UnstartedClassIsAnonymousClass(self, shadow_frame.get(), &result, 0); EXPECT_EQ(result.GetZ(), 1); - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } TEST_F(UnstartedRuntimeTest, GetDeclaringClass) { @@ -918,7 +902,7 @@ TEST_F(UnstartedRuntimeTest, GetDeclaringClass) { ScopedObjectAccess soa(self); JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); jobject class_loader = LoadDex("Nested"); StackHandleScope<4> hs(self); @@ -933,18 +917,16 @@ TEST_F(UnstartedRuntimeTest, GetDeclaringClass) { class_linker_->FindClass(soa.Self(), "LNested$1;", loader))); shadow_frame->SetVRegReference(0, nested_klass.Get()); - UnstartedClassGetDeclaringClass(self, shadow_frame, &result, 0); + UnstartedClassGetDeclaringClass(self, shadow_frame.get(), &result, 0); EXPECT_EQ(result.GetL(), nullptr); shadow_frame->SetVRegReference(0, inner_klass.Get()); - UnstartedClassGetDeclaringClass(self, shadow_frame, &result, 0); + UnstartedClassGetDeclaringClass(self, shadow_frame.get(), &result, 0); EXPECT_EQ(result.GetL(), nested_klass.Get()); shadow_frame->SetVRegReference(0, anon_klass.Get()); - UnstartedClassGetDeclaringClass(self, shadow_frame, &result, 0); + UnstartedClassGetDeclaringClass(self, shadow_frame.get(), &result, 0); EXPECT_EQ(result.GetL(), nullptr); - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } TEST_F(UnstartedRuntimeTest, ThreadLocalGet) { @@ -952,7 +934,7 @@ TEST_F(UnstartedRuntimeTest, ThreadLocalGet) { ScopedObjectAccess soa(self); JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); StackHandleScope<1> hs(self); ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); @@ -974,14 +956,14 @@ TEST_F(UnstartedRuntimeTest, ThreadLocalGet) { ASSERT_TRUE(caller_method != nullptr); ASSERT_TRUE(caller_method->IsDirect()); ASSERT_TRUE(caller_method->GetDeclaringClass() == floating_decimal.Get()); - ShadowFrame* caller_frame = 
ShadowFrame::CreateDeoptimizedFrame(10, nullptr, caller_method, 0); - shadow_frame->SetLink(caller_frame); + UniqueDeoptShadowFramePtr caller_frame = CreateShadowFrame(10, nullptr, caller_method, 0); + shadow_frame->SetLink(caller_frame.get()); - UnstartedThreadLocalGet(self, shadow_frame, &result, 0); + UnstartedThreadLocalGet(self, shadow_frame.get(), &result, 0); EXPECT_TRUE(result.GetL() != nullptr); EXPECT_FALSE(self->IsExceptionPending()); - ShadowFrame::DeleteDeoptimizedFrame(caller_frame); + shadow_frame->SetLink(nullptr); } // Negative test. @@ -992,20 +974,18 @@ TEST_F(UnstartedRuntimeTest, ThreadLocalGet) { ObjPtr<mirror::Class> class_class = GetClassRoot<mirror::Class>(); ArtMethod* caller_method = &*class_class->GetDeclaredMethods(class_linker->GetImagePointerSize()).begin(); - ShadowFrame* caller_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, caller_method, 0); - shadow_frame->SetLink(caller_frame); + UniqueDeoptShadowFramePtr caller_frame = CreateShadowFrame(10, nullptr, caller_method, 0); + shadow_frame->SetLink(caller_frame.get()); Runtime::Current()->EnterTransactionMode(); - UnstartedThreadLocalGet(self, shadow_frame, &result, 0); + UnstartedThreadLocalGet(self, shadow_frame.get(), &result, 0); ASSERT_TRUE(Runtime::Current()->IsTransactionAborted()); Runtime::Current()->ExitTransactionMode(); ASSERT_TRUE(self->IsExceptionPending()); self->ClearException(); - ShadowFrame::DeleteDeoptimizedFrame(caller_frame); + shadow_frame->SetLink(nullptr); } - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } TEST_F(UnstartedRuntimeTest, FloatConversion) { @@ -1032,7 +1012,8 @@ TEST_F(UnstartedRuntimeTest, FloatConversion) { uint16_t inst_data[3] = { 0x2070, 0x0000, 0x0010 }; JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, method, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, method, 0); + shadow_frame->SetVRegDouble(0, 1.23); interpreter::DoCall<false, false>(method, self, @@ -1045,8 +1026,6 @@ TEST_F(UnstartedRuntimeTest, FloatConversion) { std::string mod_utf = string_result->ToModifiedUtf8(); EXPECT_EQ("1.23", mod_utf); - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } TEST_F(UnstartedRuntimeTest, ThreadCurrentThread) { @@ -1054,7 +1033,7 @@ TEST_F(UnstartedRuntimeTest, ThreadCurrentThread) { ScopedObjectAccess soa(self); JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); StackHandleScope<1> hs(self); ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); @@ -1069,14 +1048,12 @@ TEST_F(UnstartedRuntimeTest, ThreadCurrentThread) { { Runtime::Current()->EnterTransactionMode(); - UnstartedThreadCurrentThread(self, shadow_frame, &result, 0); + UnstartedThreadCurrentThread(self, shadow_frame.get(), &result, 0); ASSERT_TRUE(Runtime::Current()->IsTransactionAborted()); Runtime::Current()->ExitTransactionMode(); ASSERT_TRUE(self->IsExceptionPending()); self->ClearException(); } - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } TEST_F(UnstartedRuntimeTest, LogManager) { @@ -1133,7 +1110,7 @@ class UnstartedClassForNameTest : public UnstartedRuntimeTest { } JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); for (const char* name : kTestCases) { ObjPtr<mirror::String> name_string = 
mirror::String::AllocFromModifiedUtf8(self, name); @@ -1144,7 +1121,7 @@ class UnstartedClassForNameTest : public UnstartedRuntimeTest { } CHECK(!self->IsExceptionPending()); - runner(self, shadow_frame, name_string, &result); + runner(self, shadow_frame.get(), name_string, &result); if (should_succeed) { CHECK(!self->IsExceptionPending()) << name << " " << self->GetException()->Dump(); @@ -1161,8 +1138,6 @@ class UnstartedClassForNameTest : public UnstartedRuntimeTest { Runtime::Current()->ExitTransactionMode(); } } - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } mirror::ClassLoader* GetBootClassLoader() REQUIRES_SHARED(Locks::mutator_lock_) { @@ -1189,7 +1164,7 @@ class UnstartedClassForNameTest : public UnstartedRuntimeTest { CHECK(boot_cp_init != nullptr); JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, boot_cp_init, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, boot_cp_init, 0); shadow_frame->SetVRegReference(0, boot_cp.Get()); // create instruction data for invoke-direct {v0} of method with fake index @@ -1202,8 +1177,6 @@ class UnstartedClassForNameTest : public UnstartedRuntimeTest { inst_data[0], &result); CHECK(!self->IsExceptionPending()); - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } return boot_cp.Get(); @@ -1310,15 +1283,13 @@ TEST_F(UnstartedRuntimeTest, ClassGetSignatureAnnotation) { ASSERT_TRUE(class_linker->EnsureInitialized(self, list_class, true, true)); JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); shadow_frame->SetVRegReference(0, list_class.Get()); - UnstartedClassGetSignatureAnnotation(self, shadow_frame, &result, 0); + UnstartedClassGetSignatureAnnotation(self, shadow_frame.get(), &result, 0); ASSERT_TRUE(result.GetL() != nullptr); ASSERT_FALSE(self->IsExceptionPending()); - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); - ASSERT_TRUE(result.GetL()->IsObjectArray()); ObjPtr<mirror::ObjectArray<mirror::Object>> array = result.GetL()->AsObjectArray<mirror::Object>(); @@ -1370,10 +1341,10 @@ TEST_F(UnstartedRuntimeTest, ConstructorNewInstance0) { // OK, we're ready now. 
JValue result; - ShadowFrame* shadow_frame = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr shadow_frame = CreateShadowFrame(10, nullptr, nullptr, 0); shadow_frame->SetVRegReference(0, cons.Get()); shadow_frame->SetVRegReference(1, args.Get()); - UnstartedConstructorNewInstance0(self, shadow_frame, &result, 0); + UnstartedConstructorNewInstance0(self, shadow_frame.get(), &result, 0); ASSERT_TRUE(result.GetL() != nullptr); ASSERT_FALSE(self->IsExceptionPending()); @@ -1386,29 +1357,25 @@ TEST_F(UnstartedRuntimeTest, ConstructorNewInstance0) { ObjPtr<mirror::String> result_msg = reinterpret_cast<mirror::Throwable*>(result.GetL())->GetDetailMessage(); EXPECT_OBJ_PTR_EQ(input.Get(), result_msg); - - ShadowFrame::DeleteDeoptimizedFrame(shadow_frame); } TEST_F(UnstartedRuntimeTest, IdentityHashCode) { Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ShadowFrame* tmp = ShadowFrame::CreateDeoptimizedFrame(10, nullptr, nullptr, 0); + UniqueDeoptShadowFramePtr tmp = CreateShadowFrame(10, nullptr, nullptr, 0); JValue result; - UnstartedSystemIdentityHashCode(self, tmp, &result, 0); + UnstartedSystemIdentityHashCode(self, tmp.get(), &result, 0); EXPECT_EQ(0, result.GetI()); ASSERT_FALSE(self->IsExceptionPending()); ObjPtr<mirror::String> str = mirror::String::AllocFromModifiedUtf8(self, "abd"); tmp->SetVRegReference(0, str); - UnstartedSystemIdentityHashCode(self, tmp, &result, 0); + UnstartedSystemIdentityHashCode(self, tmp.get(), &result, 0); EXPECT_NE(0, result.GetI()); EXPECT_EQ(str->IdentityHashCode(), result.GetI()); ASSERT_FALSE(self->IsExceptionPending()); - - ShadowFrame::DeleteDeoptimizedFrame(tmp); } } // namespace interpreter diff --git a/runtime/check_jni.cc b/runtime/jni/check_jni.cc index a875498f74..7919c32737 100644 --- a/runtime/check_jni.cc +++ b/runtime/jni/check_jni.cc @@ -35,8 +35,8 @@ #include "dex/descriptors_names.h" #include "dex/dex_file-inl.h" #include "gc/space/space.h" -#include "jni/java_vm_ext.h" -#include "jni/jni_internal.h" +#include "java_vm_ext.h" +#include "jni_internal.h" #include "mirror/class-inl.h" #include "mirror/field.h" #include "mirror/method.h" diff --git a/runtime/check_jni.h b/runtime/jni/check_jni.h index f41abf81ce..10fdfe859d 100644 --- a/runtime/check_jni.h +++ b/runtime/jni/check_jni.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef ART_RUNTIME_CHECK_JNI_H_ -#define ART_RUNTIME_CHECK_JNI_H_ +#ifndef ART_RUNTIME_JNI_CHECK_JNI_H_ +#define ART_RUNTIME_JNI_CHECK_JNI_H_ #include <jni.h> @@ -26,4 +26,4 @@ const JNIInvokeInterface* GetCheckJniInvokeInterface(); } // namespace art -#endif // ART_RUNTIME_CHECK_JNI_H_ +#endif // ART_RUNTIME_JNI_CHECK_JNI_H_ diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h index ee4f53b695..bb99c0c8d6 100644 --- a/runtime/mirror/object-inl.h +++ b/runtime/mirror/object-inl.h @@ -78,18 +78,6 @@ inline void Object::SetLockWord(LockWord new_val, bool as_volatile) { } } -inline bool Object::CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val) { - // Force use of non-transactional mode and do not check. - return CasFieldWeakSequentiallyConsistent32<false, false>( - OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue()); -} - -inline bool Object::CasLockWordWeakAcquire(LockWord old_val, LockWord new_val) { - // Force use of non-transactional mode and do not check. 
- return CasFieldWeakAcquire32<false, false>( - OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue()); -} - inline uint32_t Object::GetLockOwnerThreadId() { return Monitor::GetLockOwnerThreadId(this); } @@ -575,84 +563,6 @@ inline void Object::SetField32Transaction(MemberOffset field_offset, int32_t new } } -// TODO: Pass memory_order_ and strong/weak as arguments to avoid code duplication? - -template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags> -inline bool Object::CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) { - if (kCheckTransaction) { - DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction()); - } - if (kTransactionActive) { - Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true); - } - if (kVerifyFlags & kVerifyThis) { - VerifyObject(this); - } - uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value(); - AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr); - - return atomic_addr->CompareAndSetWeakSequentiallyConsistent(old_value, new_value); -} - -template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags> -inline bool Object::CasFieldWeakAcquire32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) { - if (kCheckTransaction) { - DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction()); - } - if (kTransactionActive) { - Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true); - } - if (kVerifyFlags & kVerifyThis) { - VerifyObject(this); - } - uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value(); - AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr); - - return atomic_addr->CompareAndSetWeakAcquire(old_value, new_value); -} - -template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags> -inline bool Object::CasFieldWeakRelease32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) { - if (kCheckTransaction) { - DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction()); - } - if (kTransactionActive) { - Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true); - } - if (kVerifyFlags & kVerifyThis) { - VerifyObject(this); - } - uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value(); - AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr); - - return atomic_addr->CompareAndSetWeakRelease(old_value, new_value); -} - -template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags> -inline bool Object::CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) { - if (kCheckTransaction) { - DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction()); - } - if (kTransactionActive) { - Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true); - } - if (kVerifyFlags & kVerifyThis) { - VerifyObject(this); - } - uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value(); - AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr); - - return atomic_addr->CompareAndSetStrongSequentiallyConsistent(old_value, new_value); -} - template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags, diff --git a/runtime/mirror/object-readbarrier-inl.h 
b/runtime/mirror/object-readbarrier-inl.h index aeaa850abe..597ba670c9 100644 --- a/runtime/mirror/object-readbarrier-inl.h +++ b/runtime/mirror/object-readbarrier-inl.h @@ -32,14 +32,17 @@ namespace mirror { template<VerifyObjectFlags kVerifyFlags> inline LockWord Object::GetLockWord(bool as_volatile) { if (as_volatile) { - return LockWord(GetField32Volatile<kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Object, monitor_))); + return LockWord(GetField32Volatile<kVerifyFlags>(MonitorOffset())); } - return LockWord(GetField32<kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Object, monitor_))); + return LockWord(GetField32<kVerifyFlags>(MonitorOffset())); } template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags> -inline bool Object::CasFieldWeakRelaxed32(MemberOffset field_offset, - int32_t old_value, int32_t new_value) { +inline bool Object::CasField32(MemberOffset field_offset, + int32_t old_value, + int32_t new_value, + CASMode mode, + std::memory_order memory_order) { if (kCheckTransaction) { DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction()); } @@ -52,19 +55,19 @@ inline bool Object::CasFieldWeakRelaxed32(MemberOffset field_offset, uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value(); AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr); - return atomic_addr->CompareAndSetWeakRelaxed(old_value, new_value); + return atomic_addr->CompareAndSet(old_value, new_value, mode, memory_order); } -inline bool Object::CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val) { +inline bool Object::CasLockWord(LockWord old_val, + LockWord new_val, + CASMode mode, + std::memory_order memory_order) { // Force use of non-transactional mode and do not check. - return CasFieldWeakRelaxed32<false, false>( - OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue()); -} - -inline bool Object::CasLockWordWeakRelease(LockWord old_val, LockWord new_val) { - // Force use of non-transactional mode and do not check. - return CasFieldWeakRelease32<false, false>( - OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue()); + return CasField32<false, false>(MonitorOffset(), + old_val.GetValue(), + new_val.GetValue(), + mode, + memory_order); } inline uint32_t Object::GetReadBarrierState(uintptr_t* fake_address_dependency) { @@ -145,7 +148,7 @@ inline uint32_t Object::GetReadBarrierStateAcquire() { return rb_state; } -template<bool kCasRelease> +template<std::memory_order kMemoryOrder> inline bool Object::AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state) { if (!kUseBakerReadBarrier) { LOG(FATAL) << "Unreachable"; @@ -169,9 +172,7 @@ inline bool Object::AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32 // If kCasRelease == true, use a CAS release so that when GC updates all the fields of // an object and then changes the object from gray to black, the field updates (stores) will be // visible (won't be reordered after this CAS.) - } while (!(kCasRelease ? - CasLockWordWeakRelease(expected_lw, new_lw) : - CasLockWordWeakRelaxed(expected_lw, new_lw))); + } while (!CasLockWord(expected_lw, new_lw, CASMode::kWeak, kMemoryOrder)); return true; } @@ -188,7 +189,7 @@ inline bool Object::AtomicSetMarkBit(uint32_t expected_mark_bit, uint32_t mark_b new_lw = lw; new_lw.SetMarkBitState(mark_bit); // Since this is only set from the mutator, we can use the non-release CAS. 
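The per-ordering helpers removed from Object (weak/strong crossed with relaxed/acquire/release/sequentially-consistent) are replaced by a single CasField32/CasLockWord entry point that takes an explicit CASMode and std::memory_order and forwards to AtomicInteger::CompareAndSet. A minimal sketch of how that mode/order dispatch can be expressed over std::atomic, for illustration only (ART's Atomic<T> wrapper is not shown in this diff):

#include <atomic>

enum class CASMode { kStrong, kWeak };  // Mirrors the mode argument used at the call sites.

template <typename T>
bool CompareAndSet(std::atomic<T>& atom, T expected, T desired,
                   CASMode mode, std::memory_order order) {
  // compare_exchange_* updates 'expected' on failure; the call sites above only
  // need the boolean result, matching the helpers being removed.
  return mode == CASMode::kStrong
      ? atom.compare_exchange_strong(expected, desired, order)
      : atom.compare_exchange_weak(expected, desired, order);
}

A weak CAS may fail spuriously and is meant for retry loops; a strong CAS only fails when the value really differs, which is what the hash-code call sites further down rely on.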
- } while (!CasLockWordWeakRelaxed(expected_lw, new_lw)); + } while (!CasLockWord(expected_lw, new_lw, CASMode::kWeak, std::memory_order_relaxed)); return true; } diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc index 4240e702b5..ce845bfa7d 100644 --- a/runtime/mirror/object.cc +++ b/runtime/mirror/object.cc @@ -197,7 +197,9 @@ int32_t Object::IdentityHashCode() { // loop iteration. LockWord hash_word = LockWord::FromHashCode(GenerateIdentityHashCode(), lw.GCState()); DCHECK_EQ(hash_word.GetState(), LockWord::kHashCode); - if (current_this->CasLockWordWeakRelaxed(lw, hash_word)) { + // Use a strong CAS to prevent spurious failures since these can make the boot image + // non-deterministic. + if (current_this->CasLockWord(lw, hash_word, CASMode::kStrong, std::memory_order_relaxed)) { return hash_word.GetHashCode(); } break; diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h index a89d6323a5..654fe95d9d 100644 --- a/runtime/mirror/object.h +++ b/runtime/mirror/object.h @@ -111,7 +111,7 @@ class MANAGED LOCKABLE Object { #endif ALWAYS_INLINE void SetReadBarrierState(uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_); - template<bool kCasRelease = false> + template<std::memory_order kMemoryOrder = std::memory_order_relaxed> ALWAYS_INLINE bool AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_); @@ -151,13 +151,7 @@ class MANAGED LOCKABLE Object { LockWord GetLockWord(bool as_volatile) REQUIRES_SHARED(Locks::mutator_lock_); template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> void SetLockWord(LockWord new_val, bool as_volatile) REQUIRES_SHARED(Locks::mutator_lock_); - bool CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val) - REQUIRES_SHARED(Locks::mutator_lock_); - bool CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val) - REQUIRES_SHARED(Locks::mutator_lock_); - bool CasLockWordWeakAcquire(LockWord old_val, LockWord new_val) - REQUIRES_SHARED(Locks::mutator_lock_); - bool CasLockWordWeakRelease(LockWord old_val, LockWord new_val) + bool CasLockWord(LockWord old_val, LockWord new_val, CASMode mode, std::memory_order memory_order) REQUIRES_SHARED(Locks::mutator_lock_); uint32_t GetLockOwnerThreadId(); @@ -523,41 +517,11 @@ class MANAGED LOCKABLE Object { template<bool kTransactionActive, bool kCheckTransaction = true, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> - ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) - REQUIRES_SHARED(Locks::mutator_lock_); - - template<bool kTransactionActive, - bool kCheckTransaction = true, - VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> - ALWAYS_INLINE bool CasFieldWeakRelaxed32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) - REQUIRES_SHARED(Locks::mutator_lock_); - - template<bool kTransactionActive, - bool kCheckTransaction = true, - VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> - ALWAYS_INLINE bool CasFieldWeakAcquire32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) - REQUIRES_SHARED(Locks::mutator_lock_); - - template<bool kTransactionActive, - bool kCheckTransaction = true, - VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> - ALWAYS_INLINE bool CasFieldWeakRelease32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) - REQUIRES_SHARED(Locks::mutator_lock_); - - template<bool kTransactionActive, - bool kCheckTransaction = true, - VerifyObjectFlags kVerifyFlags = 
kDefaultVerifyFlags> - ALWAYS_INLINE bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, - int32_t old_value, - int32_t new_value) + ALWAYS_INLINE bool CasField32(MemberOffset field_offset, + int32_t old_value, + int32_t new_value, + CASMode mode, + std::memory_order memory_order) REQUIRES_SHARED(Locks::mutator_lock_); template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false> diff --git a/runtime/monitor.cc b/runtime/monitor.cc index 2c38de5dae..d47bc0d12e 100644 --- a/runtime/monitor.cc +++ b/runtime/monitor.cc @@ -134,13 +134,15 @@ Monitor::Monitor(Thread* self, Thread* owner, mirror::Object* obj, int32_t hash_ } int32_t Monitor::GetHashCode() { - while (!HasHashCode()) { - if (hash_code_.CompareAndSetWeakRelaxed(0, mirror::Object::GenerateIdentityHashCode())) { - break; - } + int32_t hc = hash_code_.load(std::memory_order_relaxed); + if (!HasHashCode()) { + // Use a strong CAS to prevent spurious failures since these can make the boot image + // non-deterministic. + hash_code_.CompareAndSetStrongRelaxed(0, mirror::Object::GenerateIdentityHashCode()); + hc = hash_code_.load(std::memory_order_relaxed); } DCHECK(HasHashCode()); - return hash_code_.load(std::memory_order_relaxed); + return hc; } bool Monitor::Install(Thread* self) { @@ -173,7 +175,7 @@ bool Monitor::Install(Thread* self) { } LockWord fat(this, lw.GCState()); // Publish the updated lock word, which may race with other threads. - bool success = GetObject()->CasLockWordWeakRelease(lw, fat); + bool success = GetObject()->CasLockWord(lw, fat, CASMode::kWeak, std::memory_order_release); // Lock profiling. if (success && owner_ != nullptr && lock_profiling_threshold_ != 0) { // Do not abort on dex pc errors. This can easily happen when we want to dump a stack trace on @@ -1039,7 +1041,7 @@ mirror::Object* Monitor::MonitorEnter(Thread* self, mirror::Object* obj, bool tr case LockWord::kUnlocked: { // No ordering required for preceding lockword read, since we retest. LockWord thin_locked(LockWord::FromThinLockId(thread_id, 0, lock_word.GCState())); - if (h_obj->CasLockWordWeakAcquire(lock_word, thin_locked)) { + if (h_obj->CasLockWord(lock_word, thin_locked, CASMode::kWeak, std::memory_order_acquire)) { AtraceMonitorLock(self, h_obj.Get(), false /* is_wait */); return h_obj.Get(); // Success! } @@ -1063,7 +1065,10 @@ mirror::Object* Monitor::MonitorEnter(Thread* self, mirror::Object* obj, bool tr return h_obj.Get(); // Success! } else { // Use CAS to preserve the read barrier state. - if (h_obj->CasLockWordWeakRelaxed(lock_word, thin_locked)) { + if (h_obj->CasLockWord(lock_word, + thin_locked, + CASMode::kWeak, + std::memory_order_relaxed)) { AtraceMonitorLock(self, h_obj.Get(), false /* is_wait */); return h_obj.Get(); // Success! } @@ -1165,7 +1170,7 @@ bool Monitor::MonitorExit(Thread* self, mirror::Object* obj) { return true; } else { // Use CAS to preserve the read barrier state. - if (h_obj->CasLockWordWeakRelease(lock_word, new_lw)) { + if (h_obj->CasLockWord(lock_word, new_lw, CASMode::kWeak, std::memory_order_release)) { AtraceMonitorUnlock(); // Success! 
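Two hash-code installers above (Object::IdentityHashCode and Monitor::GetHashCode) switch from a weak, looping CAS to a single strong CAS. As the added comments note, a weak CAS can fail spuriously; the old loops would then generate a new identity hash code on the retry, so repeated builds could bake different hash codes into boot-image objects. An illustrative install-once pattern over a plain std::atomic (not ART code), assuming 0 means "no hash code yet" as in Monitor::GetHashCode:

#include <atomic>
#include <cstdint>

int32_t GetOrCreateHashCode(std::atomic<int32_t>& hash_code, int32_t fresh) {
  int32_t expected = 0;  // Sentinel: no hash code installed yet.
  if (hash_code.compare_exchange_strong(expected, fresh,
                                        std::memory_order_relaxed)) {
    return fresh;  // We installed it; every later call sees this value.
  }
  return expected;  // Another thread won the race; reuse its value.
}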
return true; diff --git a/runtime/native/sun_misc_Unsafe.cc b/runtime/native/sun_misc_Unsafe.cc index d41a19556e..0f474d3c9d 100644 --- a/runtime/native/sun_misc_Unsafe.cc +++ b/runtime/native/sun_misc_Unsafe.cc @@ -41,9 +41,11 @@ static jboolean Unsafe_compareAndSwapInt(JNIEnv* env, jobject, jobject javaObj, ScopedFastNativeObjectAccess soa(env); ObjPtr<mirror::Object> obj = soa.Decode<mirror::Object>(javaObj); // JNI must use non transactional mode. - bool success = obj->CasFieldStrongSequentiallyConsistent32<false>(MemberOffset(offset), - expectedValue, - newValue); + bool success = obj->CasField32<false>(MemberOffset(offset), + expectedValue, + newValue, + CASMode::kStrong, + std::memory_order_seq_cst); return success ? JNI_TRUE : JNI_FALSE; } diff --git a/runtime/runtime_callbacks_test.cc b/runtime/runtime_callbacks_test.cc index 54769f9c49..794ac19c4b 100644 --- a/runtime/runtime_callbacks_test.cc +++ b/runtime/runtime_callbacks_test.cc @@ -339,9 +339,6 @@ class RuntimeSigQuitCallbackRuntimeCallbacksTest : public RuntimeCallbacksTest { }; TEST_F(RuntimeSigQuitCallbackRuntimeCallbacksTest, SigQuit) { - // SigQuit induces a dump. ASan isn't happy with libunwind reading memory. - TEST_DISABLED_FOR_MEMORY_TOOL(); - // The runtime needs to be started for the signal handler. Thread* self = Thread::Current(); diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc index 43609e80bd..f2418d0f1a 100644 --- a/runtime/stack_map.cc +++ b/runtime/stack_map.cc @@ -91,7 +91,7 @@ void CodeInfo::DecodeDexRegisterMap(uint32_t stack_map_index, template<typename Accessor> static void AddTableSizeStats(const char* table_name, - const BitTable<Accessor::kCount>& table, + const BitTable<Accessor>& table, /*out*/ Stats* parent) { Stats* table_stats = parent->Child(table_name); table_stats->AddBits(table.BitSize()); @@ -135,7 +135,7 @@ void DexRegisterMap::Dump(VariableIndentationOutputStream* vios) const { template<typename Accessor> static void DumpTable(VariableIndentationOutputStream* vios, const char* table_name, - const BitTable<Accessor::kCount>& table, + const BitTable<Accessor>& table, bool verbose, bool is_mask = false) { if (table.NumRows() != 0) { diff --git a/runtime/stack_map.h b/runtime/stack_map.h index 8af73e9e10..64a084f1c2 100644 --- a/runtime/stack_map.h +++ b/runtime/stack_map.h @@ -119,7 +119,7 @@ class DexRegisterMap { * - Knowing the inlining information, * - Knowing the values of dex registers. */ -class StackMap : public BitTable<8>::Accessor { +class StackMap : public BitTableAccessor<8> { public: enum Kind { Default = -1, @@ -138,7 +138,7 @@ class StackMap : public BitTable<8>::Accessor { BIT_TABLE_COLUMN(7, DexRegisterMapIndex) ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const { - return UnpackNativePc(Get<kPackedNativePc>(), instruction_set); + return UnpackNativePc(GetPackedNativePc(), instruction_set); } ALWAYS_INLINE bool HasInlineInfo() const { @@ -172,7 +172,7 @@ class StackMap : public BitTable<8>::Accessor { * The row referenced from the StackMap holds information at depth 0. * Following rows hold information for further depths. */ -class InlineInfo : public BitTable<6>::Accessor { +class InlineInfo : public BitTableAccessor<6> { public: BIT_TABLE_HEADER() BIT_TABLE_COLUMN(0, IsLast) // Determines if there are further rows for further depths. 
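The stack map accessors change from BitTable<N>::Accessor to BitTableAccessor<N>, and the tables themselves become parameterized on the accessor type (BitTable<StackMap> rather than BitTable<StackMap::kCount>), which is what lets the call sites below write stack_maps_.GetRow(index) and use generated getters such as GetPackedNativePc(). A rough sketch of the shape implied by these call sites; bit_table.h is not part of this diff, so the names and details here are assumptions:

#include <cstdint>

template <uint32_t kNumColumns>
class BitTableAccessor {
 public:
  static constexpr uint32_t kCount = kNumColumns;  // The old call sites used Accessor::kCount.
  // Column access helpers (Get<Column>() and the BIT_TABLE_COLUMN getters) elided.
};

template <typename Accessor>
class BitTable {
 public:
  // Rows come back as ready-made accessors instead of the caller constructing
  // Accessor(&table, index) by hand.
  Accessor GetRow(uint32_t index) const;
  static constexpr uint32_t kNumColumns = Accessor::kCount;
};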
@@ -206,7 +206,7 @@ class InlineInfo : public BitTable<6>::Accessor { const MethodInfo& method_info) const; }; -class InvokeInfo : public BitTable<3>::Accessor { +class InvokeInfo : public BitTableAccessor<3> { public: BIT_TABLE_HEADER() BIT_TABLE_COLUMN(0, PackedNativePc) @@ -214,7 +214,7 @@ class InvokeInfo : public BitTable<3>::Accessor { BIT_TABLE_COLUMN(2, MethodInfoIndex) ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const { - return StackMap::UnpackNativePc(Get<kPackedNativePc>(), instruction_set); + return StackMap::UnpackNativePc(GetPackedNativePc(), instruction_set); } uint32_t GetMethodIndex(MethodInfo method_info) const { @@ -222,19 +222,19 @@ class InvokeInfo : public BitTable<3>::Accessor { } }; -class MaskInfo : public BitTable<1>::Accessor { +class MaskInfo : public BitTableAccessor<1> { public: BIT_TABLE_HEADER() BIT_TABLE_COLUMN(0, Mask) }; -class DexRegisterMapInfo : public BitTable<1>::Accessor { +class DexRegisterMapInfo : public BitTableAccessor<1> { public: BIT_TABLE_HEADER() BIT_TABLE_COLUMN(0, CatalogueIndex) }; -class DexRegisterInfo : public BitTable<2>::Accessor { +class DexRegisterInfo : public BitTableAccessor<2> { public: BIT_TABLE_HEADER() BIT_TABLE_COLUMN(0, Kind) @@ -265,7 +265,7 @@ class DexRegisterInfo : public BitTable<2>::Accessor { // Register masks tend to have many trailing zero bits (caller-saves are usually not encoded), // therefore it is worth encoding the mask as value+shift. -class RegisterMask : public BitTable<2>::Accessor { +class RegisterMask : public BitTableAccessor<2> { public: BIT_TABLE_HEADER() BIT_TABLE_COLUMN(0, Value) @@ -303,7 +303,7 @@ class CodeInfo { } ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const { - return StackMap(&stack_maps_, index); + return stack_maps_.GetRow(index); } BitMemoryRegion GetStackMask(size_t index) const { @@ -317,7 +317,7 @@ class CodeInfo { uint32_t GetRegisterMaskOf(const StackMap& stack_map) const { uint32_t index = stack_map.GetRegisterMaskIndex(); - return (index == StackMap::kNoValue) ? 0 : RegisterMask(®ister_masks_, index).GetMask(); + return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask(); } uint32_t GetNumberOfLocationCatalogEntries() const { @@ -327,7 +327,7 @@ class CodeInfo { ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const { return (index == StackMap::kNoValue) ? 
DexRegisterLocation::None() - : DexRegisterInfo(&dex_register_catalog_, index).GetLocation(); + : dex_register_catalog_.GetRow(index).GetLocation(); } uint32_t GetNumberOfStackMaps() const { @@ -335,7 +335,7 @@ class CodeInfo { } InvokeInfo GetInvokeInfo(size_t index) const { - return InvokeInfo(&invoke_infos_, index); + return invoke_infos_.GetRow(index); } ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const { @@ -363,7 +363,7 @@ class CodeInfo { } InlineInfo GetInlineInfo(size_t index) const { - return InlineInfo(&inline_infos_, index); + return inline_infos_.GetRow(index); } uint32_t GetInlineDepthOf(StackMap stack_map) const { @@ -473,14 +473,14 @@ class CodeInfo { } size_t size_; - BitTable<StackMap::kCount> stack_maps_; - BitTable<RegisterMask::kCount> register_masks_; - BitTable<MaskInfo::kCount> stack_masks_; - BitTable<InvokeInfo::kCount> invoke_infos_; - BitTable<InlineInfo::kCount> inline_infos_; - BitTable<MaskInfo::kCount> dex_register_masks_; - BitTable<DexRegisterMapInfo::kCount> dex_register_maps_; - BitTable<DexRegisterInfo::kCount> dex_register_catalog_; + BitTable<StackMap> stack_maps_; + BitTable<RegisterMask> register_masks_; + BitTable<MaskInfo> stack_masks_; + BitTable<InvokeInfo> invoke_infos_; + BitTable<InlineInfo> inline_infos_; + BitTable<MaskInfo> dex_register_masks_; + BitTable<DexRegisterMapInfo> dex_register_maps_; + BitTable<DexRegisterInfo> dex_register_catalog_; uint32_t number_of_dex_registers_; // Excludes any inlined methods. }; diff --git a/runtime/subtype_check.h b/runtime/subtype_check.h index 1fe62e8f46..aac547eb78 100644 --- a/runtime/subtype_check.h +++ b/runtime/subtype_check.h @@ -542,15 +542,17 @@ struct SubtypeCheck { int32_t new_value) REQUIRES_SHARED(Locks::mutator_lock_) { if (Runtime::Current() != nullptr && Runtime::Current()->IsActiveTransaction()) { - return klass->template - CasFieldWeakSequentiallyConsistent32</*kTransactionActive*/true>(offset, - old_value, - new_value); + return klass->template CasField32</*kTransactionActive*/true>(offset, + old_value, + new_value, + CASMode::kWeak, + std::memory_order_seq_cst); } else { - return klass->template - CasFieldWeakSequentiallyConsistent32</*kTransactionActive*/false>(offset, - old_value, - new_value); + return klass->template CasField32</*kTransactionActive*/false>(offset, + old_value, + new_value, + CASMode::kWeak, + std::memory_order_seq_cst); } } diff --git a/runtime/subtype_check_info_test.cc b/runtime/subtype_check_info_test.cc index 91fcc07d65..e40bca57fe 100644 --- a/runtime/subtype_check_info_test.cc +++ b/runtime/subtype_check_info_test.cc @@ -121,11 +121,11 @@ struct SubtypeCheckInfoTest : public ::testing::Test { return SubtypeCheckInfo::MakeUnchecked(bs, overflow, depth); } - static bool HasNext(SubtypeCheckInfo io) { + static bool HasNext(const SubtypeCheckInfo& io) { return io.HasNext(); } - static BitString GetPathToRoot(SubtypeCheckInfo io) { + static BitString GetPathToRoot(const SubtypeCheckInfo& io) { return io.GetPathToRoot(); } diff --git a/runtime/subtype_check_test.cc b/runtime/subtype_check_test.cc index e297d0beb4..666bf812f5 100644 --- a/runtime/subtype_check_test.cc +++ b/runtime/subtype_check_test.cc @@ -86,9 +86,11 @@ struct MockClass { } template <bool kTransactionActive> - bool CasFieldWeakSequentiallyConsistent32(art::MemberOffset offset, - int32_t old_value, - int32_t new_value) + bool CasField32(art::MemberOffset offset, + int32_t old_value, + int32_t new_value, + CASMode mode ATTRIBUTE_UNUSED, + std::memory_order memory_order 
ATTRIBUTE_UNUSED) REQUIRES_SHARED(Locks::mutator_lock_) { UNUSED(offset); if (old_value == GetField32Volatile(offset)) { @@ -652,13 +654,15 @@ void EnsureStateChangedTestRecursive( MockClass* klass, size_t cur_depth, size_t total_depth, - std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>> transitions) { + const std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>>& transitions) { MockScopedLockSubtypeCheck lock_a; MockScopedLockMutator lock_b; using SCTree = MockSubtypeCheck; ASSERT_EQ(cur_depth, klass->Depth()); - ApplyTransition(SCTree::Lookup(klass), transitions[cur_depth].first, transitions[cur_depth].second); + ApplyTransition(SCTree::Lookup(klass), + transitions[cur_depth].first, + transitions[cur_depth].second); if (total_depth == cur_depth + 1) { return; @@ -674,7 +678,7 @@ void EnsureStateChangedTestRecursive( void EnsureStateChangedTest( MockClass* root, size_t depth, - std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>> transitions) { + const std::vector<std::pair<SubtypeCheckInfo::State, SubtypeCheckInfo::State>>& transitions) { ASSERT_EQ(depth, transitions.size()); EnsureStateChangedTestRecursive(root, /*cur_depth*/0u, depth, transitions); diff --git a/runtime/thread.cc b/runtime/thread.cc index 2f641dfa72..3c5569fe05 100644 --- a/runtime/thread.cc +++ b/runtime/thread.cc @@ -605,7 +605,7 @@ void Thread::InstallImplicitProtection() { 1u; #endif volatile char space[kPageSize - (kAsanMultiplier * 256)]; - char sink ATTRIBUTE_UNUSED = space[zero]; + char sink ATTRIBUTE_UNUSED = space[zero]; // NOLINT if (reinterpret_cast<uintptr_t>(space) >= target + kPageSize) { Touch(target); } diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc index 61ddded9f2..47877bd195 100644 --- a/runtime/verifier/method_verifier.cc +++ b/runtime/verifier/method_verifier.cc @@ -3839,6 +3839,8 @@ ArtMethod* MethodVerifier::ResolveMethodAndCheckAccess( template <class T> ArtMethod* MethodVerifier::VerifyInvocationArgsFromIterator( T* it, const Instruction* inst, MethodType method_type, bool is_range, ArtMethod* res_method) { + DCHECK_EQ(!is_range, inst->HasVarArgs()); + // We use vAA as our expected arg count, rather than res_method->insSize, because we need to // match the call to the signature. Also, we might be calling through an abstract method // definition (which doesn't have register count values). diff --git a/sigchainlib/sigchain_test.cc b/sigchainlib/sigchain_test.cc index 1d1e54f127..9584ded65f 100644 --- a/sigchainlib/sigchain_test.cc +++ b/sigchainlib/sigchain_test.cc @@ -70,7 +70,7 @@ class SigchainTest : public ::testing::Test { }; -static void TestSignalBlocking(std::function<void()> fn) { +static void TestSignalBlocking(const std::function<void()>& fn) { // Unblock SIGSEGV, make sure it stays unblocked. sigset64_t mask; sigemptyset64(&mask); diff --git a/test/661-checker-simd-reduc/build b/test/661-checker-simd-reduc/build deleted file mode 100644 index d85147f17b..0000000000 --- a/test/661-checker-simd-reduc/build +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# -# Copyright 2018 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# See b/65168732 -export USE_D8=false - -./default-build "$@" diff --git a/test/661-checker-simd-reduc/src/Main.java b/test/661-checker-simd-reduc/src/Main.java index eff2018078..7b6f957b2a 100644 --- a/test/661-checker-simd-reduc/src/Main.java +++ b/test/661-checker-simd-reduc/src/Main.java @@ -55,8 +55,8 @@ public class Main { /// CHECK-START: int Main.reductionInt(int[]) loop_optimization (before) /// CHECK-DAG: <<Cons0:i\d+>> IntConstant 0 loop:none /// CHECK-DAG: <<Cons1:i\d+>> IntConstant 1 loop:none - /// CHECK-DAG: <<Phi1:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop:B\d+>> outer_loop:none - /// CHECK-DAG: <<Phi2:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop>> outer_loop:none + /// CHECK-DAG: <<Phi2:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop:B\d+>> outer_loop:none + /// CHECK-DAG: <<Phi1:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop>> outer_loop:none /// CHECK-DAG: <<Get:i\d+>> ArrayGet [{{l\d+}},<<Phi1>>] loop:<<Loop>> outer_loop:none /// CHECK-DAG: Add [<<Phi2>>,<<Get>>] loop:<<Loop>> outer_loop:none /// CHECK-DAG: Add [<<Phi1>>,<<Cons1>>] loop:<<Loop>> outer_loop:none @@ -130,8 +130,8 @@ public class Main { /// CHECK-START: int Main.reductionIntToLoop(int[]) loop_optimization (before) /// CHECK-DAG: <<Cons0:i\d+>> IntConstant 0 loop:none /// CHECK-DAG: <<Cons1:i\d+>> IntConstant 1 loop:none - /// CHECK-DAG: <<Phi1:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop1:B\d+>> outer_loop:none - /// CHECK-DAG: <<Phi2:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop1>> outer_loop:none + /// CHECK-DAG: <<Phi2:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop1:B\d+>> outer_loop:none + /// CHECK-DAG: <<Phi1:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop1>> outer_loop:none /// CHECK-DAG: <<Get:i\d+>> ArrayGet [{{l\d+}},<<Phi1>>] loop:<<Loop1>> outer_loop:none /// CHECK-DAG: Add [<<Phi2>>,<<Get>>] loop:<<Loop1>> outer_loop:none /// CHECK-DAG: Add [<<Phi1>>,<<Cons1>>] loop:<<Loop1>> outer_loop:none @@ -295,8 +295,8 @@ public class Main { /// CHECK-START: int Main.reductionMinusInt(int[]) loop_optimization (before) /// CHECK-DAG: <<Cons0:i\d+>> IntConstant 0 loop:none /// CHECK-DAG: <<Cons1:i\d+>> IntConstant 1 loop:none - /// CHECK-DAG: <<Phi1:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop:B\d+>> outer_loop:none - /// CHECK-DAG: <<Phi2:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop>> outer_loop:none + /// CHECK-DAG: <<Phi2:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop:B\d+>> outer_loop:none + /// CHECK-DAG: <<Phi1:i\d+>> Phi [<<Cons0>>,{{i\d+}}] loop:<<Loop>> outer_loop:none /// CHECK-DAG: <<Get:i\d+>> ArrayGet [{{l\d+}},<<Phi1>>] loop:<<Loop>> outer_loop:none /// CHECK-DAG: Sub [<<Phi2>>,<<Get>>] loop:<<Loop>> outer_loop:none /// CHECK-DAG: Add [<<Phi1>>,<<Cons1>>] loop:<<Loop>> outer_loop:none diff --git a/test/672-checker-throw-method/build b/test/672-checker-throw-method/build deleted file mode 100644 index d85147f17b..0000000000 --- a/test/672-checker-throw-method/build +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# -# Copyright 2018 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# See b/65168732 -export USE_D8=false - -./default-build "$@" diff --git a/test/672-checker-throw-method/src/Main.java b/test/672-checker-throw-method/src/Main.java index a507133b91..360b52c79d 100644 --- a/test/672-checker-throw-method/src/Main.java +++ b/test/672-checker-throw-method/src/Main.java @@ -51,7 +51,7 @@ public class Main { /// CHECK-START: void Main.doit1(int[]) code_sinking (before) /// CHECK: begin_block /// CHECK: <<Str:l\d+>> LoadString - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block @@ -61,7 +61,7 @@ public class Main { // /// CHECK-START: void Main.doit1(int[]) code_sinking (after) /// CHECK: begin_block - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block @@ -109,7 +109,7 @@ public class Main { /// CHECK-START: void Main.doit3(int[]) code_sinking (before) /// CHECK: begin_block /// CHECK: <<Str:l\d+>> LoadString - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block @@ -119,7 +119,7 @@ public class Main { // /// CHECK-START: void Main.doit3(int[]) code_sinking (after) /// CHECK: begin_block - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block diff --git a/test/673-checker-throw-vmethod/build b/test/673-checker-throw-vmethod/build deleted file mode 100644 index d85147f17b..0000000000 --- a/test/673-checker-throw-vmethod/build +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# -# Copyright 2018 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# See b/65168732 -export USE_D8=false - -./default-build "$@" diff --git a/test/673-checker-throw-vmethod/src/Main.java b/test/673-checker-throw-vmethod/src/Main.java index d0e1591bdb..206dfaf330 100644 --- a/test/673-checker-throw-vmethod/src/Main.java +++ b/test/673-checker-throw-vmethod/src/Main.java @@ -45,7 +45,7 @@ public class Main { /// CHECK-START: void Main.doit1(int[]) code_sinking (before) /// CHECK: begin_block /// CHECK: <<Str:l\d+>> LoadString - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block @@ -55,7 +55,7 @@ public class Main { // /// CHECK-START: void Main.doit1(int[]) code_sinking (after) /// CHECK: begin_block - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block @@ -103,7 +103,7 @@ public class Main { /// CHECK-START: void Main.doit3(int[]) code_sinking (before) /// CHECK: begin_block /// CHECK: <<Str:l\d+>> LoadString - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block @@ -113,7 +113,7 @@ public class Main { // /// CHECK-START: void Main.doit3(int[]) code_sinking (after) /// CHECK: begin_block - /// CHECK: <<Tst:z\d+>> NotEqual + /// CHECK: <<Tst:z\d+>> Equal /// CHECK: If [<<Tst>>] /// CHECK: end_block /// CHECK: begin_block diff --git a/test/706-checker-scheduler/build b/test/706-checker-scheduler/build deleted file mode 100644 index d85147f17b..0000000000 --- a/test/706-checker-scheduler/build +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# -# Copyright 2018 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# See b/65168732 -export USE_D8=false - -./default-build "$@" diff --git a/test/706-checker-scheduler/src/Main.java b/test/706-checker-scheduler/src/Main.java index 25e4fad714..eb985d0032 100644 --- a/test/706-checker-scheduler/src/Main.java +++ b/test/706-checker-scheduler/src/Main.java @@ -35,8 +35,8 @@ public class Main { /// CHECK-START-ARM64: int Main.arrayAccess() scheduler (before) /// CHECK: <<Const1:i\d+>> IntConstant 1 - /// CHECK: <<i0:i\d+>> Phi /// CHECK: <<res0:i\d+>> Phi + /// CHECK: <<i0:i\d+>> Phi /// CHECK: <<Array:i\d+>> IntermediateAddress /// CHECK: <<ArrayGet1:i\d+>> ArrayGet [<<Array>>,<<i0>>] /// CHECK: <<res1:i\d+>> Add [<<res0>>,<<ArrayGet1>>] @@ -46,8 +46,8 @@ public class Main { /// CHECK-START-ARM64: int Main.arrayAccess() scheduler (after) /// CHECK: <<Const1:i\d+>> IntConstant 1 - /// CHECK: <<i0:i\d+>> Phi /// CHECK: <<res0:i\d+>> Phi + /// CHECK: <<i0:i\d+>> Phi /// CHECK: <<Array:i\d+>> IntermediateAddress /// CHECK: <<ArrayGet1:i\d+>> ArrayGet [<<Array>>,<<i0>>] /// CHECK: <<i1:i\d+>> Add [<<i0>>,<<Const1>>] diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk index f8bebdd35f..ad292fd0a7 100644 --- a/test/Android.run-test.mk +++ b/test/Android.run-test.mk @@ -140,8 +140,11 @@ $(foreach target, $(TARGET_TYPES), \ $(call core-image-dependencies,$(target),$(image),$(compiler),$(address_size))))))) test-art-host-run-test-dependencies : $(host_prereq_rules) +.PHONY: test-art-host-run-test-dependencies test-art-target-run-test-dependencies : $(target_prereq_rules) +.PHONY: test-art-target-run-test-dependencies test-art-run-test-dependencies : test-art-host-run-test-dependencies test-art-target-run-test-dependencies +.PHONY: test-art-run-test-dependencies # Create a rule to build and run a test group of the following form: # test-art-{1: host target}-run-test diff --git a/test/etc/default-build b/test/etc/default-build index c8993c6611..39f1a251c7 100755 --- a/test/etc/default-build +++ b/test/etc/default-build @@ -119,12 +119,13 @@ DEV_MODE="no" # The key for default arguments if no experimental things are enabled. DEFAULT_EXPERIMENT="no-experiment" -# Setup experimental flag mappings in a bash associative array. -declare -A SMALI_EXPERIMENTAL_ARGS -SMALI_EXPERIMENTAL_ARGS["default-methods"]="--api 24" -SMALI_EXPERIMENTAL_ARGS["method-handles"]="--api 26" -SMALI_EXPERIMENTAL_ARGS["var-handles"]="--api 26" -SMALI_EXPERIMENTAL_ARGS["agents"]="--api 26" +# Setup experimental API level mappings in a bash associative array. +declare -A EXPERIMENTAL_API_LEVEL +EXPERIMENTAL_API_LEVEL["default-methods"]="24" +EXPERIMENTAL_API_LEVEL["parameter-annotations"]="25" +EXPERIMENTAL_API_LEVEL["agents"]="26" +EXPERIMENTAL_API_LEVEL["method-handles"]="26" +EXPERIMENTAL_API_LEVEL["var-handles"]="28" declare -A JAVAC_EXPERIMENTAL_ARGS JAVAC_EXPERIMENTAL_ARGS["default-methods"]="-source 1.8 -target 1.8" @@ -135,11 +136,6 @@ JAVAC_EXPERIMENTAL_ARGS["var-handles"]="-source 1.8 -target 1.8" JAVAC_EXPERIMENTAL_ARGS[${DEFAULT_EXPERIMENT}]="-source 1.8 -target 1.8" JAVAC_EXPERIMENTAL_ARGS["agents"]="-source 1.8 -target 1.8" -declare -A DX_EXPERIMENTAL_ARGS -DX_EXPERIMENTAL_ARGS["method-handles"]="--min-sdk-version=26" -DX_EXPERIMENTAL_ARGS["parameter-annotations"]="--min-sdk-version=25" -DX_EXPERIMENTAL_ARGS["var-handles"]="--min-sdk-version=28" - while true; do if [ "x$1" = "x--dx-option" ]; then shift @@ -216,14 +212,15 @@ if [[ $BUILD_MODE == jvm ]]; then HAS_SRC_ART=false fi -# Be sure to get any default arguments if not doing any experiments. 
-EXPERIMENTAL="${EXPERIMENTAL} ${DEFAULT_EXPERIMENT}" - # Add args from the experimental mappings. -for experiment in ${EXPERIMENTAL}; do - SMALI_ARGS="${SMALI_ARGS} ${SMALI_EXPERIMENTAL_ARGS[${experiment}]}" +for experiment in ${EXPERIMENTAL} ${DEFAULT_EXPERIMENT}; do JAVAC_ARGS="${JAVAC_ARGS} ${JAVAC_EXPERIMENTAL_ARGS[${experiment}]}" - DX_FLAGS="${DX_FLAGS} ${DX_EXPERIMENTAL_ARGS[${experiment}]}" +done + +for experiment in ${EXPERIMENTAL}; do + SMALI_ARGS="${SMALI_ARGS} --api ${EXPERIMENTAL_API_LEVEL[${experiment}]}" + DX_FLAGS="${DX_FLAGS} --min-sdk-version=${EXPERIMENTAL_API_LEVEL[${experiment}]}" + D8_FLAGS="--min-api ${EXPERIMENTAL_API_LEVEL[${experiment}]}" done ######################################### @@ -289,17 +286,21 @@ function make_dex() { local name="$1" local dx_input - if [[ "$USE_DESUGAR" == "true" ]]; then - # Make a jar first so desugar doesn't need every .class file individually. - jar cf "$name.before-desugar.jar" -C "$name" . + if [[ "$USE_DESUGAR" != "true" ]]; then + # Use d8 with --no-desugaring for the 3 tests that opt out of desugaring (b/110150973). + local d8_inputs=$(find classes -name '*.class' -type f) + ${D8} ${D8_FLAGS} --debug --no-desugaring --output ${name}.jar $d8_inputs && \ + jar -xf ${name}.jar ${name}.dex && \ + rm ${name}.jar + return $? + fi - dx_input="${name}.desugar.jar" + # Make a jar first so desugar doesn't need every .class file individually. + jar cf "$name.before-desugar.jar" -C "$name" . - # Make desugared JAR. - desugar --input "$name.before-desugar.jar" --output "$dx_input" - else - dx_input="${name}" - fi + # Make desugared JAR. + dx_input="${name}.desugar.jar" + desugar --input "$name.before-desugar.jar" --output "$dx_input" local dexer="${DX}" if [[ "${USE_D8}" != "false" ]]; then @@ -307,7 +308,11 @@ function make_dex() { fi # Make dex file from desugared JAR. - ${dexer} -JXmx256m ${DX_VM_FLAGS} --debug --dex --output=${name}.dex ${DX_FLAGS} "${dx_input}" + local dexer_flags="${DX_FLAGS} --debug --dex" + if [ $DEV_MODE = "yes" ]; then + echo ${dexer} -JXmx256m ${DX_VM_FLAGS} $dexer_flags --output=${name}.dex "${dx_input}" + fi + ${dexer} -JXmx256m ${DX_VM_FLAGS} $dexer_flags --output=${name}.dex "${dx_input}" } # Merge all the dex files in $1..$N into $1. Skip non-existing files, but at least 1 file must exist. @@ -432,8 +437,7 @@ if [[ "${HAS_JASMIN}" == true ]]; then # Compile Jasmin classes as if they were part of the classes.dex file. make_jasmin jasmin_classes $(find 'jasmin' -name '*.j') if [[ "${NEED_DEX}" == "true" ]]; then - # Disable desugar because it won't handle intentional linkage errors. - USE_DESUGAR=false make_dex jasmin_classes + make_dex jasmin_classes make_dexmerge classes.dex jasmin_classes.dex else # Move jasmin classes into classes directory so that they are picked up with -cp classes. @@ -458,10 +462,7 @@ if [[ "$HAS_JASMIN_MULTIDEX" == true ]]; then make_jasmin jasmin_classes2 $(find 'jasmin-multidex' -name '*.j') if [[ "${NEED_DEX}" == "true" ]]; then - # Disable desugar because it won't handle intentional linkage errors. - USE_DESUGAR=false make_dex jasmin_classes2 - - # Merge jasmin_classes2.dex into classes2.dex + make_dex jasmin_classes2 make_dexmerge classes2.dex jasmin_classes2.dex else # Move jasmin classes into classes2 directory so that they are picked up with -cp classes2. 
diff --git a/test/testrunner/target_config.py b/test/testrunner/target_config.py index 68efcaf5a3..1924cee310 100644 --- a/test/testrunner/target_config.py +++ b/test/testrunner/target_config.py @@ -265,14 +265,8 @@ target_config = { 'ART_USE_READ_BARRIER' : 'false' } }, - 'art-gtest-valgrind32': { - # Disabled: Valgrind is no longer supported. - # Historical note: This was already disabled, as x86 valgrind did not understand SSE4.x - # 'make' : 'valgrind-test-art-host32', - 'env': { - 'ART_USE_READ_BARRIER' : 'false' - } - }, + # TODO: Remove this configuration, when the ART Buildbot is no + # longer using it for 'host-x86_64-valgrind'. 'art-gtest-valgrind64': { # Disabled: Valgrind is no longer supported. # 'make' : 'valgrind-test-art-host64', @@ -293,10 +287,12 @@ target_config = { 'ASAN_OPTIONS' : 'detect_leaks=0' } }, + # TODO: Also exercise '--interp-ac' in 'art-asan', when b/109813469 is addressed. 'art-asan': { 'run-test' : ['--interpreter', '--optimizing', - '--jit'], + '--jit', + '--speed-profile'], 'env': { 'SANITIZE_HOST' : 'address', 'ASAN_OPTIONS' : 'detect_leaks=0' diff --git a/test/ti-agent/breakpoint_helper.cc b/test/ti-agent/breakpoint_helper.cc index db4ea61f1c..83ba0a6342 100644 --- a/test/ti-agent/breakpoint_helper.cc +++ b/test/ti-agent/breakpoint_helper.cc @@ -114,7 +114,7 @@ extern "C" JNIEXPORT jlong JNICALL Java_art_Breakpoint_getStartLocation(JNIEnv* return 0; } jlong start = 0; - jlong end = end; + jlong end; JvmtiErrorToException(env, jvmti_env, jvmti_env->GetMethodLocation(method, &start, &end)); return start; } diff --git a/tools/ahat/Android.mk b/tools/ahat/Android.mk index 9f423ba76d..2741a9247d 100644 --- a/tools/ahat/Android.mk +++ b/tools/ahat/Android.mk @@ -28,9 +28,6 @@ LOCAL_IS_HOST_MODULE := true LOCAL_MODULE_TAGS := optional LOCAL_MODULE := ahat -# Let users with Java 7 run ahat (b/28303627) -LOCAL_JAVA_LANGUAGE_VERSION := 1.7 - # Make this available on the classpath of the general-tests tradefed suite. # It is used by libcore tests that run there. 
LOCAL_COMPATIBILITY_SUITE := general-tests diff --git a/tools/ahat/etc/ahat_api.txt b/tools/ahat/etc/ahat_api.txt index 93fe46bf8b..6fc62e78eb 100644 --- a/tools/ahat/etc/ahat_api.txt +++ b/tools/ahat/etc/ahat_api.txt @@ -10,6 +10,7 @@ package com.android.ahat.dominators { public class DominatorsComputation { method public static void computeDominators(com.android.ahat.dominators.DominatorsComputation.Node); + method public static void computeDominators(com.android.ahat.dominators.DominatorsComputation.Node, com.android.ahat.progress.Progress, long); } public static abstract interface DominatorsComputation.Node { @@ -27,12 +28,10 @@ package com.android.ahat.heapdump { method public int getLength(); method public com.android.ahat.heapdump.Value getValue(int); method public java.util.List<com.android.ahat.heapdump.Value> getValues(); - method public java.lang.String toString(); } public class AhatClassInstance extends com.android.ahat.heapdump.AhatInstance { method public java.lang.Iterable<com.android.ahat.heapdump.FieldValue> getInstanceFields(); - method public java.lang.String toString(); } public class AhatClassObj extends com.android.ahat.heapdump.AhatInstance { @@ -42,7 +41,6 @@ package com.android.ahat.heapdump { method public java.lang.String getName(); method public java.util.List<com.android.ahat.heapdump.FieldValue> getStaticFieldValues(); method public com.android.ahat.heapdump.AhatClassObj getSuperClassObj(); - method public java.lang.String toString(); } public class AhatHeap implements com.android.ahat.heapdump.Diffable { @@ -157,8 +155,13 @@ package com.android.ahat.heapdump { } public class Parser { + ctor public Parser(java.nio.ByteBuffer); + ctor public Parser(java.io.File) throws java.io.IOException; + method public com.android.ahat.heapdump.Parser map(com.android.ahat.proguard.ProguardMap); + method public com.android.ahat.heapdump.AhatSnapshot parse() throws com.android.ahat.heapdump.HprofFormatException, java.io.IOException; method public static com.android.ahat.heapdump.AhatSnapshot parseHeapDump(java.io.File, com.android.ahat.proguard.ProguardMap) throws com.android.ahat.heapdump.HprofFormatException, java.io.IOException; method public static com.android.ahat.heapdump.AhatSnapshot parseHeapDump(java.nio.ByteBuffer, com.android.ahat.proguard.ProguardMap) throws com.android.ahat.heapdump.HprofFormatException, java.io.IOException; + method public com.android.ahat.heapdump.Parser progress(com.android.ahat.progress.Progress); } public class PathElement implements com.android.ahat.heapdump.Diffable { @@ -284,6 +287,26 @@ package com.android.ahat.heapdump { } +package com.android.ahat.progress { + + public class NullProgress implements com.android.ahat.progress.Progress { + ctor public NullProgress(); + method public void advance(long); + method public void done(); + method public void start(java.lang.String, long); + method public void update(long); + } + + public abstract interface Progress { + method public default void advance(); + method public abstract void advance(long); + method public abstract void done(); + method public abstract void start(java.lang.String, long); + method public abstract void update(long); + } + +} + package com.android.ahat.proguard { public class ProguardMap { diff --git a/tools/ahat/src/main/com/android/ahat/AsciiProgress.java b/tools/ahat/src/main/com/android/ahat/AsciiProgress.java new file mode 100644 index 0000000000..3ac98a443a --- /dev/null +++ b/tools/ahat/src/main/com/android/ahat/AsciiProgress.java @@ -0,0 +1,69 @@ +/* + * Copyright 
(C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.android.ahat; + +import com.android.ahat.progress.Progress; + +/** + * A progress bar that prints ascii to System.out. + * <p> + * For best results, have System.out positioned at a new line before using + * this progress indicator. + */ +class AsciiProgress implements Progress { + private String description; + private long duration; + private long progress; + + private static void display(String description, long percent) { + System.out.print(String.format("\r[ %3d%% ] %s ...", percent, description)); + System.out.flush(); + } + + @Override + public void start(String description, long duration) { + assert this.description == null; + this.description = description; + this.duration = duration; + this.progress = 0; + display(description, 0); + } + + @Override + public void advance(long n) { + update(progress + n); + } + + @Override + public void update(long current) { + assert description != null; + long oldPercent = progress * 100 / duration; + long newPercent = current * 100 / duration; + progress = current; + + if (newPercent > oldPercent) { + display(description, newPercent); + } + } + + @Override + public void done() { + update(duration); + System.out.println(); + this.description = null; + } +} diff --git a/tools/ahat/src/main/com/android/ahat/Main.java b/tools/ahat/src/main/com/android/ahat/Main.java index af197d4862..d3cfcf9e94 100644 --- a/tools/ahat/src/main/com/android/ahat/Main.java +++ b/tools/ahat/src/main/com/android/ahat/Main.java @@ -20,6 +20,7 @@ import com.android.ahat.heapdump.AhatSnapshot; import com.android.ahat.heapdump.Diff; import com.android.ahat.heapdump.HprofFormatException; import com.android.ahat.heapdump.Parser; +import com.android.ahat.progress.Progress; import com.android.ahat.proguard.ProguardMap; import com.sun.net.httpserver.HttpServer; import java.io.File; @@ -58,10 +59,10 @@ public class Main { * Prints an error message and exits the application on failure to load the * heap dump. 
*/ - private static AhatSnapshot loadHeapDump(File hprof, ProguardMap map) { + private static AhatSnapshot loadHeapDump(File hprof, ProguardMap map, Progress progress) { System.out.println("Processing '" + hprof + "' ..."); try { - return Parser.parseHeapDump(hprof, map); + return new Parser(hprof).map(map).progress(progress).parse(); } catch (IOException e) { System.err.println("Unable to load '" + hprof + "':"); e.printStackTrace(); @@ -152,9 +153,9 @@ public class Main { System.exit(1); } - AhatSnapshot ahat = loadHeapDump(hprof, map); + AhatSnapshot ahat = loadHeapDump(hprof, map, new AsciiProgress()); if (hprofbase != null) { - AhatSnapshot base = loadHeapDump(hprofbase, mapbase); + AhatSnapshot base = loadHeapDump(hprofbase, mapbase, new AsciiProgress()); System.out.println("Diffing heap dumps ..."); Diff.snapshots(ahat, base); diff --git a/tools/ahat/src/main/com/android/ahat/dominators/DominatorsComputation.java b/tools/ahat/src/main/com/android/ahat/dominators/DominatorsComputation.java index 6185deed04..903211eb50 100644 --- a/tools/ahat/src/main/com/android/ahat/dominators/DominatorsComputation.java +++ b/tools/ahat/src/main/com/android/ahat/dominators/DominatorsComputation.java @@ -16,6 +16,8 @@ package com.android.ahat.dominators; +import com.android.ahat.progress.NullProgress; +import com.android.ahat.progress.Progress; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Deque; @@ -146,6 +148,10 @@ public class DominatorsComputation { // If revisit != null, this node is on the global list of nodes to be // revisited. public NodeSet revisit = null; + + // Distance from the root to this node. Used for purposes of tracking + // progress only. + public long depth; } // A collection of node ids. @@ -245,6 +251,23 @@ public class DominatorsComputation { * @see Node */ public static void computeDominators(Node root) { + computeDominators(root, new NullProgress(), 0); + } + + /** + * Computes the immediate dominators of all nodes reachable from the <code>root</code> node. + * There must not be any incoming references to the <code>root</code> node. + * <p> + * The result of this function is to call the {@link Node#setDominator} + * function on every node reachable from the root node. + * + * @param root the root node of the dominators computation + * @param progress progress tracker. + * @param numNodes upper bound on the number of reachable nodes in the + * graph, for progress tracking purposes only. + * @see Node + */ + public static void computeDominators(Node root, Progress progress, long numNodes) { long id = 0; // The set of nodes xS such that xS.revisit != null. @@ -257,6 +280,7 @@ public class DominatorsComputation { NodeS rootS = new NodeS(); rootS.node = root; rootS.id = id++; + rootS.depth = 0; root.setDominatorsComputationState(rootS); Deque<Link> dfs = new ArrayDeque<Link>(); @@ -265,8 +289,14 @@ public class DominatorsComputation { dfs.push(new Link(rootS, child)); } + // workBound is an upper bound on the amount of work required in the + // second phase of dominators computation, used solely for the purposes of + // tracking progress. + long workBound = 0; + // 1. Do a depth first search of the nodes, label them with ids and come // up with initial candidate dominators for them. + progress.start("Initializing dominators", numNodes); while (!dfs.isEmpty()) { Link link = dfs.pop(); @@ -274,6 +304,7 @@ public class DominatorsComputation { // This is the marker link indicating we have now visited all // nodes reachable from link.srcS. 
link.srcS.maxReachableId = id - 1; + progress.advance(); } else { NodeS dstS = (NodeS)link.dst.getDominatorsComputationState(); if (dstS == null) { @@ -288,6 +319,7 @@ public class DominatorsComputation { dstS.domS = link.srcS; dstS.domS.dominated.add(dstS); dstS.oldDomS = link.srcS; + dstS.depth = link.srcS.depth + 1; dfs.push(new Link(dstS)); for (Node child : link.dst.getReferencesForDominators()) { @@ -296,6 +328,10 @@ public class DominatorsComputation { } else { // We have seen the destination node before. Update the state based // on the new potential dominator. + if (dstS.inRefIds.size == 1) { + workBound += dstS.oldDomS.depth; + } + long seenid = dstS.inRefIds.last(); dstS.inRefIds.add(link.srcS.id); @@ -330,9 +366,11 @@ public class DominatorsComputation { } } } + progress.done(); // 2. Continue revisiting nodes until every node satisfies the requirement // that domS.id == oldDomS.id. + progress.start("Resolving dominators", workBound); while (!revisit.isEmpty()) { NodeS oldDomS = revisit.poll(); assert oldDomS.revisit != null; @@ -388,7 +426,10 @@ public class DominatorsComputation { nodeS.oldDomS.revisit.add(nodeS); } } + progress.advance((oldDomS.depth - oldDomS.oldDomS.depth) * nodes.size); } + progress.done(); + // 3. We have figured out the correct dominator for each node. Notify the // user of the results by doing one last traversal of the nodes. diff --git a/tools/ahat/src/main/com/android/ahat/heapdump/AhatInstance.java b/tools/ahat/src/main/com/android/ahat/heapdump/AhatInstance.java index 67253bf0e7..95553a21d1 100644 --- a/tools/ahat/src/main/com/android/ahat/heapdump/AhatInstance.java +++ b/tools/ahat/src/main/com/android/ahat/heapdump/AhatInstance.java @@ -17,6 +17,7 @@ package com.android.ahat.heapdump; import com.android.ahat.dominators.DominatorsComputation; +import com.android.ahat.progress.Progress; import java.awt.image.BufferedImage; import java.util.ArrayDeque; import java.util.ArrayList; @@ -603,10 +604,16 @@ public abstract class AhatInstance implements Diffable<AhatInstance>, * mNextInstanceToGcRootField * mHardReverseReferences * mSoftReverseReferences + * + * @param progress used to track progress of the traversal. + * @param numInsts upper bound on the total number of instances reachable + * from the root, solely used for the purposes of tracking + * progress. */ - static void computeReverseReferences(SuperRoot root) { + static void computeReverseReferences(SuperRoot root, Progress progress, long numInsts) { // Start by doing a breadth first search through strong references. // Then continue the breadth first search through weak references. + progress.start("Reversing references", numInsts); Queue<Reference> strong = new ArrayDeque<Reference>(); Queue<Reference> weak = new ArrayDeque<Reference>(); @@ -620,6 +627,7 @@ public abstract class AhatInstance implements Diffable<AhatInstance>, if (ref.ref.mNextInstanceToGcRoot == null) { // This is the first time we have seen ref.ref. + progress.advance(); ref.ref.mNextInstanceToGcRoot = ref.src; ref.ref.mNextInstanceToGcRootField = ref.field; ref.ref.mHardReverseReferences = new ArrayList<AhatInstance>(); @@ -646,6 +654,7 @@ public abstract class AhatInstance implements Diffable<AhatInstance>, if (ref.ref.mNextInstanceToGcRoot == null) { // This is the first time we have seen ref.ref. 
+ progress.advance(); ref.ref.mNextInstanceToGcRoot = ref.src; ref.ref.mNextInstanceToGcRootField = ref.field; ref.ref.mHardReverseReferences = new ArrayList<AhatInstance>(); @@ -664,6 +673,8 @@ public abstract class AhatInstance implements Diffable<AhatInstance>, ref.ref.mSoftReverseReferences.add(ref.src); } } + + progress.done(); } /** diff --git a/tools/ahat/src/main/com/android/ahat/heapdump/AhatSnapshot.java b/tools/ahat/src/main/com/android/ahat/heapdump/AhatSnapshot.java index 535db082c1..bc940479b1 100644 --- a/tools/ahat/src/main/com/android/ahat/heapdump/AhatSnapshot.java +++ b/tools/ahat/src/main/com/android/ahat/heapdump/AhatSnapshot.java @@ -17,6 +17,7 @@ package com.android.ahat.heapdump; import com.android.ahat.dominators.DominatorsComputation; +import com.android.ahat.progress.Progress; import java.util.List; /** @@ -39,7 +40,8 @@ public class AhatSnapshot implements Diffable<AhatSnapshot> { AhatSnapshot(SuperRoot root, Instances<AhatInstance> instances, List<AhatHeap> heaps, - Site rootSite) { + Site rootSite, + Progress progress) { mSuperRoot = root; mInstances = instances; mHeaps = heaps; @@ -53,8 +55,8 @@ public class AhatSnapshot implements Diffable<AhatSnapshot> { } } - AhatInstance.computeReverseReferences(mSuperRoot); - DominatorsComputation.computeDominators(mSuperRoot); + AhatInstance.computeReverseReferences(mSuperRoot, progress, mInstances.size()); + DominatorsComputation.computeDominators(mSuperRoot, progress, mInstances.size()); AhatInstance.computeRetainedSize(mSuperRoot, mHeaps.size()); for (AhatHeap heap : mHeaps) { diff --git a/tools/ahat/src/main/com/android/ahat/heapdump/Instances.java b/tools/ahat/src/main/com/android/ahat/heapdump/Instances.java index 085144650f..7bb19a244b 100644 --- a/tools/ahat/src/main/com/android/ahat/heapdump/Instances.java +++ b/tools/ahat/src/main/com/android/ahat/heapdump/Instances.java @@ -67,6 +67,10 @@ class Instances<T extends AhatInstance> implements Iterable<T> { return null; } + public int size() { + return mInstances.size(); + } + @Override public Iterator<T> iterator() { return mInstances.iterator(); diff --git a/tools/ahat/src/main/com/android/ahat/heapdump/Parser.java b/tools/ahat/src/main/com/android/ahat/heapdump/Parser.java index 13be57d415..597a260628 100644 --- a/tools/ahat/src/main/com/android/ahat/heapdump/Parser.java +++ b/tools/ahat/src/main/com/android/ahat/heapdump/Parser.java @@ -16,6 +16,8 @@ package com.android.ahat.heapdump; +import com.android.ahat.progress.NullProgress; +import com.android.ahat.progress.Progress; import com.android.ahat.proguard.ProguardMap; import java.io.File; import java.io.IOException; @@ -33,35 +35,95 @@ import java.util.Map; /** * Provides methods for parsing heap dumps. + * <p> + * The heap dump should be a heap dump in the J2SE HPROF format optionally + * with Android extensions and satisfying the following additional + * constraints: + * <ul> + * <li> + * Class serial numbers, stack frames, and stack traces individually satisfy + * the following: + * <ul> + * <li> All elements are defined before they are referenced. + * <li> Ids are densely packed in some range [a, b] where a is not necessarily 0. + * <li> There are not more than 2^31 elements defined. + * </ul> + * <li> All classes are defined via a LOAD CLASS record before the first + * heap dump segment. + * <li> The ID size used in the heap dump is 4 bytes. 
+ * </ul> */ public class Parser { private static final int ID_SIZE = 4; - private Parser() { + private HprofBuffer hprof = null; + private ProguardMap map = new ProguardMap(); + private Progress progress = new NullProgress(); + + /** + * Creates an hprof Parser that parses a heap dump from a byte buffer. + * + * @param hprof byte buffer to parse the heap dump from. + */ + public Parser(ByteBuffer hprof) { + this.hprof = new HprofBuffer(hprof); + } + + /** + * Creates an hprof Parser that parses a heap dump from a file. + * + * @param hprof file to parse the heap dump from. + * @throws IOException if the file cannot be accessed. + */ + public Parser(File hprof) throws IOException { + this.hprof = new HprofBuffer(hprof); } /** - * Parses a heap dump from a File. - * <p> - * The heap dump should be a heap dump in the J2SE HPROF format optionally - * with Android extensions and satisfying the following additional - * constraints: - * <ul> - * <li> - * Class serial numbers, stack frames, and stack traces individually satisfy - * the following: - * <ul> - * <li> All elements are defined before they are referenced. - * <li> Ids are densely packed in some range [a, b] where a is not necessarily 0. - * <li> There are not more than 2^31 elements defined. - * </ul> - * <li> All classes are defined via a LOAD CLASS record before the first - * heap dump segment. - * <li> The ID size used in the heap dump is 4 bytes. - * </ul> - * <p> - * The given proguard map will be used to deobfuscate class names, field - * names, and stack traces in the heap dump. + * Sets the proguard map to use for deobfuscating the heap. + * + * @param map proguard map to use to deobfuscate the heap. + * @return this Parser instance. + */ + public Parser map(ProguardMap map) { + if (map == null) { + throw new NullPointerException("map == null"); + } + this.map = map; + return this; + } + + /** + * Sets the progress indicator to use when parsing the heap. + * + * @param progress progress indicator to use when parsing the heap. + * @return this Parser instance. + */ + public Parser progress(Progress progress) { + if (progress == null) { + throw new NullPointerException("progress == null"); + } + this.progress = progress; + return this; + } + + /** + * Parse the heap dump. + * + * @throws IOException if the heap dump could not be read + * @throws HprofFormatException if the heap dump is not properly formatted + * @return the parsed heap dump + */ + public AhatSnapshot parse() throws IOException, HprofFormatException { + try { + return parseInternal(); + } catch (BufferUnderflowException e) { + throw new HprofFormatException("Unexpected end of file", e); + } + } + + /** + * Parses a heap dump from a File with given proguard map. * * @param hprof the hprof file to parse * @param map the proguard map for deobfuscation @@ -71,35 +133,11 @@ public class Parser { */ public static AhatSnapshot parseHeapDump(File hprof, ProguardMap map) throws IOException, HprofFormatException { - try { - return parseHeapDump(new HprofBuffer(hprof), map); - } catch (BufferUnderflowException e) { - throw new HprofFormatException("Unexpected end of file", e); - } + return new Parser(hprof).map(map).parse(); } /** - * Parses a heap dump from a byte buffer. 
- * <p> - * The heap dump should be a heap dump in the J2SE HPROF format optionally - * with Android extensions and satisfying the following additional - * constraints: - * <ul> - * <li> - * Class serial numbers, stack frames, and stack traces individually satisfy - * the following: - * <ul> - * <li> All elements are defined before they are referenced. - * <li> Ids are densely packed in some range [a, b] where a is not necessarily 0. - * <li> There are not more than 2^31 elements defined. - * </ul> - * <li> All classes are defined via a LOAD CLASS record before the first - * heap dump segment. - * <li> The ID size used in the heap dump is 4 bytes. - * </ul> - * <p> - * The given proguard map will be used to deobfuscate class names, field - * names, and stack traces in the heap dump. + * Parses a heap dump from a byte buffer with given proguard map. * * @param hprof the bytes of the hprof file to parse * @param map the proguard map for deobfuscation @@ -109,15 +147,10 @@ public class Parser { */ public static AhatSnapshot parseHeapDump(ByteBuffer hprof, ProguardMap map) throws IOException, HprofFormatException { - try { - return parseHeapDump(new HprofBuffer(hprof), map); - } catch (BufferUnderflowException e) { - throw new HprofFormatException("Unexpected end of file", e); - } + return new Parser(hprof).map(map).parse(); } - private static AhatSnapshot parseHeapDump(HprofBuffer hprof, ProguardMap map) - throws IOException, HprofFormatException, BufferUnderflowException { + private AhatSnapshot parseInternal() throws IOException, HprofFormatException { // Read, and mostly ignore, the hprof header info. { StringBuilder format = new StringBuilder(); @@ -154,7 +187,9 @@ public class Parser { ArrayList<AhatClassObj> classes = new ArrayList<AhatClassObj>(); Instances<AhatClassObj> classById = null; + progress.start("Reading hprof", hprof.size()); while (hprof.hasRemaining()) { + progress.update(hprof.tell()); int tag = hprof.getU1(); int time = hprof.getU4(); int recordLength = hprof.getU4(); @@ -230,6 +265,7 @@ public class Parser { } int subtag; while (!isEndOfHeapDumpSegment(subtag = hprof.getU1())) { + progress.update(hprof.tell()); switch (subtag) { case 0x01: { // ROOT JNI GLOBAL long objectId = hprof.getId(); @@ -524,6 +560,7 @@ public class Parser { break; } } + progress.done(); instances.addAll(classes); } @@ -542,9 +579,11 @@ public class Parser { // that we couldn't previously resolve. SuperRoot superRoot = new SuperRoot(); { + progress.start("Resolving references", mInstances.size()); Iterator<RootData> ri = roots.iterator(); RootData root = ri.next(); for (AhatInstance inst : mInstances) { + progress.advance(); long id = inst.getId(); // Skip past any roots that don't have associated instances. @@ -613,11 +652,12 @@ public class Parser { ((AhatArrayInstance)inst).initialize(array); } } + progress.done(); } hprof = null; roots = null; - return new AhatSnapshot(superRoot, mInstances, heaps.heaps, rootSite); + return new AhatSnapshot(superRoot, mInstances, heaps.heaps, rootSite, progress); } private static boolean isEndOfHeapDumpSegment(int subtag) { @@ -867,6 +907,13 @@ public class Parser { } /** + * Returns the size of the file in bytes. + */ + public int size() { + return mBuffer.capacity(); + } + + /** * Return the current absolution position in the file. 
*/ public int tell() { diff --git a/tools/ahat/src/main/com/android/ahat/progress/NullProgress.java b/tools/ahat/src/main/com/android/ahat/progress/NullProgress.java new file mode 100644 index 0000000000..a0ca08487b --- /dev/null +++ b/tools/ahat/src/main/com/android/ahat/progress/NullProgress.java @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.android.ahat.progress; + +/** + * Null progress tracker that ignores all updates. + */ +public class NullProgress implements Progress { + @Override public void start(String description, long duration) { } + @Override public void advance() { } + @Override public void advance(long n) { } + @Override public void update(long current) { } + @Override public void done() { } +} diff --git a/tools/ahat/src/main/com/android/ahat/progress/Progress.java b/tools/ahat/src/main/com/android/ahat/progress/Progress.java new file mode 100644 index 0000000000..a10379da7a --- /dev/null +++ b/tools/ahat/src/main/com/android/ahat/progress/Progress.java @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.android.ahat.progress; + +/** + * Interface for notifying users of progress during long operations. + */ +public interface Progress { + /** + * Called to indicate the start of a new phase of work with the given + * duration. Behavior is undefined if there is a current phase in progress. + * + * @param description human readable description of the work to be done. + * @param duration the maximum duration of the phase, in arbitrary units + * appropriate for the work in question. + */ + void start(String description, long duration); + + /** + * Called to indicate the current phase has advanced a single unit of its + * overall duration towards completion. Behavior is undefined if there is no + * current phase in progress. + */ + default void advance() { + advance(1); + } + + /** + * Called to indicate the current phase has advanced <code>n</code> units of + * its overall duration towards completion. Behavior is undefined if there + * is no current phase in progress. + * + * @param n number of units of progress that have advanced. + */ + void advance(long n); + + /** + * Called to indicate the current phase has completed <code>current</code> + * absolute units of its overall duration. Behavior is undefined if there is + * no current phase in progress. 
+ * + * @param current progress towards duration + */ + void update(long current); + + /** + * Called to indicate that the current phase has been completed. Behavior + * is undefined if there is no current phase in progress. + */ + void done(); +} diff --git a/tools/teardown-buildbot-device.sh b/tools/teardown-buildbot-device.sh index d25dd2b15f..be68b9f490 100755 --- a/tools/teardown-buildbot-device.sh +++ b/tools/teardown-buildbot-device.sh @@ -34,17 +34,36 @@ if [[ -n "$ART_TEST_CHROOT" ]]; then echo -e "${green}List open files under chroot dir $ART_TEST_CHROOT${nc}" adb shell lsof | grep "$ART_TEST_CHROOT" - echo -e "${green}List processes running from binaries under chroot dir $ART_TEST_CHROOT${nc}" - for link in $(adb shell ls -d "/proc/*/root"); do - root=$(adb shell readlink "$link") - if [[ "x$root" = "x$ART_TEST_CHROOT" ]]; then - dir=$(dirname "$link") - pid=$(basename "$dir") - cmdline=$(adb shell cat "$dir"/cmdline | tr -d '\000') - echo "$cmdline (PID: $pid)" - fi - done + # for_all_chroot_process ACTION + # ----------------------------- + # Execute ACTION on all processes running from binaries located + # under the chroot directory. ACTION is passed two arguments: the + # PID of the process, and a string containing the command line + # that started this process. + for_all_chroot_process() { + local action=$1 + for link in $(adb shell ls -d "/proc/*/root"); do + local root=$(adb shell readlink "$link") + if [[ "x$root" = "x$ART_TEST_CHROOT" ]]; then + local dir=$(dirname "$link") + local pid=$(basename "$dir") + local cmdline=$(adb shell cat "$dir"/cmdline | tr '\000' ' ') + $action "$pid" "$cmdline" + fi + done + } + # display_process PID CMDLINE + # --------------------------- + # Display information about process with given PID, that was started with CMDLINE. + display_process() { + local pid=$1 + local cmdline=$2 + echo "$cmdline (PID: $pid)" + } + + echo -e "${green}List processes running from binaries under chroot dir $ART_TEST_CHROOT${nc}" + for_all_chroot_process display_process # Tear down the chroot dir. @@ -102,5 +121,22 @@ if [[ -n "$ART_TEST_CHROOT" ]]; then for f in $property_context_files; do adb shell rm -f "$ART_TEST_CHROOT$f" done + + + # Kill processes still running in the chroot. + + # kill_process PID CMDLINE + # ------------------------ + # Kill process with given PID, that was started with CMDLINE. + kill_process() { + local pid=$1 + local cmdline=$2 + echo "Killing $cmdline (PID: $pid)" + adb shell kill -9 "$pid" + } + + echo -e "${green}Kill processes still running from binaries under" \ + "chroot dir $ART_TEST_CHROOT (if any)${nc} " + for_all_chroot_process kill_process fi fi diff --git a/tools/wrapagentproperties/wrapagentproperties.cc b/tools/wrapagentproperties/wrapagentproperties.cc index 8b4b062cf5..77e19e691a 100644 --- a/tools/wrapagentproperties/wrapagentproperties.cc +++ b/tools/wrapagentproperties/wrapagentproperties.cc @@ -245,7 +245,7 @@ enum class StartType { static jint CallNextAgent(StartType start, ProxyJavaVM* vm, - std::string options, + const std::string& options, void* reserved) { // TODO It might be good to set it up so that the library is unloaded even if no jvmtiEnv's are // created but this isn't expected to be common so we will just not bother.
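For context on the new progress API: com.android.ahat.progress.Progress, added above, is the single hook ahat now uses to report its long-running phases (reading the hprof, resolving references, reversing references, and resolving dominators). Below is a minimal sketch of an implementation, assuming a hypothetical ConsoleProgress class that prints each phase and a rough percentage to stderr; neither the class name nor the output format is part of this change.

import com.android.ahat.progress.Progress;

// Illustrative only: prints each phase of work and a rough percentage to stderr.
class ConsoleProgress implements Progress {
  private String description;
  private long duration;
  private long current;

  @Override public void start(String description, long duration) {
    this.description = description;
    this.duration = duration;
    this.current = 0;
    System.err.println(description + "...");
  }

  // advance() keeps the interface's default implementation, which calls advance(1).
  @Override public void advance(long n) {
    update(current + n);
  }

  @Override public void update(long current) {
    this.current = current;
    if (duration > 0) {
      System.err.print("\r" + description + ": " + (100 * current / duration) + "%");
    }
  }

  @Override public void done() {
    System.err.println("\r" + description + ": done");
  }
}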
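With a Progress implementation in hand, the reworked Parser is driven through its new builder-style entry points: construct a Parser from a File or ByteBuffer, optionally chain map() and progress(), then call parse(). A minimal usage sketch, reusing the hypothetical ConsoleProgress above; ParseExample and load are placeholder names:

import com.android.ahat.heapdump.AhatSnapshot;
import com.android.ahat.heapdump.HprofFormatException;
import com.android.ahat.heapdump.Parser;
import com.android.ahat.proguard.ProguardMap;
import java.io.File;
import java.io.IOException;

class ParseExample {
  static AhatSnapshot load(File hprof, ProguardMap map)
      throws IOException, HprofFormatException {
    // Equivalent to the old Parser.parseHeapDump(hprof, map), but with a
    // progress indicator attached.
    return new Parser(hprof)
        .map(map)
        .progress(new ConsoleProgress())
        .parse();
  }
}

The static Parser.parseHeapDump overloads are kept and now simply delegate to this builder, so existing callers keep working; when progress() is never called, the parser defaults to the new NullProgress.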