-rw-r--r--   compiler/optimizing/code_generator_arm.cc         | 152
-rw-r--r--   compiler/optimizing/code_generator_arm64.cc       | 226
-rw-r--r--   compiler/optimizing/code_generator_x86.cc         | 128
-rw-r--r--   compiler/optimizing/code_generator_x86_64.cc      | 128
-rw-r--r--   test/538-checker-embed-constants/src/Main.java    |  79
5 files changed, 182 insertions, 531 deletions
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index 5301a6bb3e..3c4a3e8e18 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -4598,7 +4598,6 @@ void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) { } if (needs_write_barrier) { // Temporary registers for the write barrier. - // These registers may be used for Baker read barriers too. locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too. locations->AddTemp(Location::RequiresRegister()); } @@ -4716,127 +4715,42 @@ void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) { __ Bind(&non_zero); } - if (kEmitCompilerReadBarrier) { - if (!kUseBakerReadBarrier) { - // When (non-Baker) read barriers are enabled, the type - // checking instrumentation requires two read barriers - // generated by CodeGeneratorARM::GenerateReadBarrierSlow: - // - // __ Mov(temp2, temp1); - // // /* HeapReference<Class> */ temp1 = temp1->component_type_ - // __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp1_loc, temp1_loc, temp2_loc, component_offset); - // - // // /* HeapReference<Class> */ temp2 = value->klass_ - // __ LoadFromOffset(kLoadWord, temp2, value, class_offset); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp1_loc); - // - // __ cmp(temp1, ShifterOperand(temp2)); - // - // However, the second read barrier may trash `temp`, as it - // is a temporary register, and as such would not be saved - // along with live registers before calling the runtime (nor - // restored afterwards). So in this case, we bail out and - // delegate the work to the array set slow path. - // - // TODO: Extend the register allocator to support a new - // "(locally) live temp" location so as to avoid always - // going into the slow path when read barriers are enabled? - // - // There is no such problem with Baker read barriers (see below). - __ b(slow_path->GetEntryLabel()); - } else { - Register temp3 = IP; - Location temp3_loc = Location::RegisterLocation(temp3); - - // Note: `temp3` (scratch register IP) cannot be used as - // `ref` argument of GenerateFieldLoadWithBakerReadBarrier - // calls below (see ReadBarrierMarkSlowPathARM for more - // details). - - // /* HeapReference<Class> */ temp1 = array->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, - temp1_loc, - array, - class_offset, - temp3_loc, - /* needs_null_check */ true); - - // /* HeapReference<Class> */ temp1 = temp1->component_type_ - codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, - temp1_loc, - temp1, - component_offset, - temp3_loc, - /* needs_null_check */ false); - // Register `temp1` is not trashed by the read barrier - // emitted by GenerateFieldLoadWithBakerReadBarrier below, - // as that method produces a call to a ReadBarrierMarkRegX - // entry point, which saves all potentially live registers, - // including temporaries such a `temp1`. - // /* HeapReference<Class> */ temp2 = value->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, - temp2_loc, - value, - class_offset, - temp3_loc, - /* needs_null_check */ false); - // If heap poisoning is enabled, `temp1` and `temp2` have - // been unpoisoned by the the previous calls to - // CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier. 
- __ cmp(temp1, ShifterOperand(temp2)); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - Label do_put; - __ b(&do_put, EQ); - // We do not need to emit a read barrier for the - // following heap reference load, as `temp1` is only used - // in a comparison with null below, and this reference - // is not kept afterwards. - // /* HeapReference<Class> */ temp1 = temp1->super_class_ - __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset); - // If heap poisoning is enabled, no need to unpoison - // `temp`, as we are comparing against null below. - __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ b(slow_path->GetEntryLabel(), NE); - } - } - } else { - // Non read barrier code. + // Note that when read barriers are enabled, the type checks + // are performed without read barriers. This is fine, even in + // the case where a class object is in the from-space after + // the flip, as a comparison involving such a type would not + // produce a false positive; it may of course produce a false + // negative, in which case we would take the ArraySet slow + // path. - // /* HeapReference<Class> */ temp1 = array->klass_ - __ LoadFromOffset(kLoadWord, temp1, array, class_offset); - codegen_->MaybeRecordImplicitNullCheck(instruction); + // /* HeapReference<Class> */ temp1 = array->klass_ + __ LoadFromOffset(kLoadWord, temp1, array, class_offset); + codegen_->MaybeRecordImplicitNullCheck(instruction); + __ MaybeUnpoisonHeapReference(temp1); + + // /* HeapReference<Class> */ temp1 = temp1->component_type_ + __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset); + // /* HeapReference<Class> */ temp2 = value->klass_ + __ LoadFromOffset(kLoadWord, temp2, value, class_offset); + // If heap poisoning is enabled, no need to unpoison `temp1` + // nor `temp2`, as we are comparing two poisoned references. + __ cmp(temp1, ShifterOperand(temp2)); + + if (instruction->StaticTypeOfArrayIsObjectArray()) { + Label do_put; + __ b(&do_put, EQ); + // If heap poisoning is enabled, the `temp1` reference has + // not been unpoisoned yet; unpoison it now. __ MaybeUnpoisonHeapReference(temp1); - // /* HeapReference<Class> */ temp1 = temp1->component_type_ - __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset); - // /* HeapReference<Class> */ temp2 = value->klass_ - __ LoadFromOffset(kLoadWord, temp2, value, class_offset); - // If heap poisoning is enabled, no need to unpoison `temp1` - // nor `temp2`, as we are comparing two poisoned references. - __ cmp(temp1, ShifterOperand(temp2)); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - Label do_put; - __ b(&do_put, EQ); - // If heap poisoning is enabled, the `temp1` reference has - // not been unpoisoned yet; unpoison it now. - __ MaybeUnpoisonHeapReference(temp1); - - // /* HeapReference<Class> */ temp1 = temp1->super_class_ - __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset); - // If heap poisoning is enabled, no need to unpoison - // `temp1`, as we are comparing against null below. - __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ b(slow_path->GetEntryLabel(), NE); - } + // /* HeapReference<Class> */ temp1 = temp1->super_class_ + __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset); + // If heap poisoning is enabled, no need to unpoison + // `temp1`, as we are comparing against null below. 
+ __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel()); + __ Bind(&do_put); + } else { + __ b(slow_path->GetEntryLabel(), NE); } } diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index 36f7b4d914..1d2f33405e 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -336,36 +336,6 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 { DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64); }; -class LoadStringSlowPathARM64 : public SlowPathCodeARM64 { - public: - explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {} - - void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { - LocationSummary* locations = instruction_->GetLocations(); - DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); - CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); - - __ Bind(GetEntryLabel()); - SaveLiveRegisters(codegen, locations); - - InvokeRuntimeCallingConvention calling_convention; - const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex(); - __ Mov(calling_convention.GetRegisterAt(0).W(), string_index); - arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this); - CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); - Primitive::Type type = instruction_->GetType(); - arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type); - - RestoreLiveRegisters(codegen, locations); - __ B(GetExitLabel()); - } - - const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; } - - private: - DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64); -}; - class NullCheckSlowPathARM64 : public SlowPathCodeARM64 { public: explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {} @@ -2178,11 +2148,6 @@ void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) { } else { locations->SetInAt(2, Location::RequiresRegister()); } - if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && (value_type == Primitive::kPrimNot)) { - // Additional temporary registers for a Baker read barrier. - locations->AddTemp(Location::RequiresRegister()); - locations->AddTemp(Location::RequiresRegister()); - } } void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) { @@ -2269,144 +2234,44 @@ void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) { __ Bind(&non_zero); } - if (kEmitCompilerReadBarrier) { - if (!kUseBakerReadBarrier) { - // When (non-Baker) read barriers are enabled, the type - // checking instrumentation requires two read barriers - // generated by CodeGeneratorARM64::GenerateReadBarrierSlow: - // - // __ Mov(temp2, temp); - // // /* HeapReference<Class> */ temp = temp->component_type_ - // __ Ldr(temp, HeapOperand(temp, component_offset)); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp_loc, temp_loc, temp2_loc, component_offset); - // - // // /* HeapReference<Class> */ temp2 = value->klass_ - // __ Ldr(temp2, HeapOperand(Register(value), class_offset)); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc); - // - // __ Cmp(temp, temp2); - // - // However, the second read barrier may trash `temp`, as it - // is a temporary register, and as such would not be saved - // along with live registers before calling the runtime (nor - // restored afterwards). 
So in this case, we bail out and - // delegate the work to the array set slow path. - // - // TODO: Extend the register allocator to support a new - // "(locally) live temp" location so as to avoid always - // going into the slow path when read barriers are enabled? - // - // There is no such problem with Baker read barriers (see below). - __ B(slow_path->GetEntryLabel()); - } else { - // Note that we cannot use `temps` (instance of VIXL's - // UseScratchRegisterScope) to allocate `temp2` because - // the Baker read barriers generated by - // GenerateFieldLoadWithBakerReadBarrier below also use - // that facility to allocate a temporary register, thus - // making VIXL's scratch register pool empty. - Location temp2_loc = locations->GetTemp(0); - Register temp2 = WRegisterFrom(temp2_loc); - - // Note: Because it is acquired from VIXL's scratch register - // pool, `temp` might be IP0, and thus cannot be used as - // `ref` argument of GenerateFieldLoadWithBakerReadBarrier - // calls below (see ReadBarrierMarkSlowPathARM64 for more - // details). - - // /* HeapReference<Class> */ temp2 = array->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, - temp2_loc, - array, - class_offset, - temp, - /* needs_null_check */ true, - /* use_load_acquire */ false); - - // /* HeapReference<Class> */ temp2 = temp2->component_type_ - codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, - temp2_loc, - temp2, - component_offset, - temp, - /* needs_null_check */ false, - /* use_load_acquire */ false); - // For the same reason that we request `temp2` from the - // register allocator above, we cannot get `temp3` from - // VIXL's scratch register pool. - Location temp3_loc = locations->GetTemp(1); - Register temp3 = WRegisterFrom(temp3_loc); - // Register `temp2` is not trashed by the read barrier - // emitted by GenerateFieldLoadWithBakerReadBarrier below, - // as that method produces a call to a ReadBarrierMarkRegX - // entry point, which saves all potentially live registers, - // including temporaries such a `temp2`. - // /* HeapReference<Class> */ temp3 = register_value->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction, - temp3_loc, - value.W(), - class_offset, - temp, - /* needs_null_check */ false, - /* use_load_acquire */ false); - // If heap poisoning is enabled, `temp2` and `temp3` have - // been unpoisoned by the the previous calls to - // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier. - __ Cmp(temp2, temp3); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - vixl::aarch64::Label do_put; - __ B(eq, &do_put); - // We do not need to emit a read barrier for the - // following heap reference load, as `temp2` is only used - // in a comparison with null below, and this reference - // is not kept afterwards. - // /* HeapReference<Class> */ temp = temp2->super_class_ - __ Ldr(temp, HeapOperand(temp2, super_offset)); - // If heap poisoning is enabled, no need to unpoison - // `temp`, as we are comparing against null below. - __ Cbnz(temp, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ B(ne, slow_path->GetEntryLabel()); - } - } - } else { - // Non read barrier code. + // Note that when Baker read barriers are enabled, the type + // checks are performed without read barriers. 
This is fine, + // even in the case where a class object is in the from-space + // after the flip, as a comparison involving such a type would + // not produce a false positive; it may of course produce a + // false negative, in which case we would take the ArraySet + // slow path. - Register temp2 = temps.AcquireSameSizeAs(array); - // /* HeapReference<Class> */ temp = array->klass_ - __ Ldr(temp, HeapOperand(array, class_offset)); - codegen_->MaybeRecordImplicitNullCheck(instruction); + Register temp2 = temps.AcquireSameSizeAs(array); + // /* HeapReference<Class> */ temp = array->klass_ + __ Ldr(temp, HeapOperand(array, class_offset)); + codegen_->MaybeRecordImplicitNullCheck(instruction); + GetAssembler()->MaybeUnpoisonHeapReference(temp); + + // /* HeapReference<Class> */ temp = temp->component_type_ + __ Ldr(temp, HeapOperand(temp, component_offset)); + // /* HeapReference<Class> */ temp2 = value->klass_ + __ Ldr(temp2, HeapOperand(Register(value), class_offset)); + // If heap poisoning is enabled, no need to unpoison `temp` + // nor `temp2`, as we are comparing two poisoned references. + __ Cmp(temp, temp2); + temps.Release(temp2); + + if (instruction->StaticTypeOfArrayIsObjectArray()) { + vixl::aarch64::Label do_put; + __ B(eq, &do_put); + // If heap poisoning is enabled, the `temp` reference has + // not been unpoisoned yet; unpoison it now. GetAssembler()->MaybeUnpoisonHeapReference(temp); - // /* HeapReference<Class> */ temp = temp->component_type_ - __ Ldr(temp, HeapOperand(temp, component_offset)); - // /* HeapReference<Class> */ temp2 = value->klass_ - __ Ldr(temp2, HeapOperand(Register(value), class_offset)); - // If heap poisoning is enabled, no need to unpoison `temp` - // nor `temp2`, as we are comparing two poisoned references. - __ Cmp(temp, temp2); - temps.Release(temp2); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - vixl::aarch64::Label do_put; - __ B(eq, &do_put); - // If heap poisoning is enabled, the `temp` reference has - // not been unpoisoned yet; unpoison it now. - GetAssembler()->MaybeUnpoisonHeapReference(temp); - - // /* HeapReference<Class> */ temp = temp->super_class_ - __ Ldr(temp, HeapOperand(temp, super_offset)); - // If heap poisoning is enabled, no need to unpoison - // `temp`, as we are comparing against null below. - __ Cbnz(temp, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ B(ne, slow_path->GetEntryLabel()); - } + // /* HeapReference<Class> */ temp = temp->super_class_ + __ Ldr(temp, HeapOperand(temp, super_offset)); + // If heap poisoning is enabled, no need to unpoison + // `temp`, as we are comparing against null below. + __ Cbnz(temp, slow_path->GetEntryLabel()); + __ Bind(&do_put); + } else { + __ B(ne, slow_path->GetEntryLabel()); } } @@ -4292,18 +4157,17 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind( } void LocationsBuilderARM64::VisitLoadString(HLoadString* load) { - LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier) - ? LocationSummary::kCallOnSlowPath + LocationSummary::CallKind call_kind = load->NeedsEnvironment() + ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); - if (kUseBakerReadBarrier && !load->NeedsEnvironment()) { - locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers. 
- } - if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) { locations->SetInAt(0, Location::RequiresRegister()); + InvokeRuntimeCallingConvention calling_convention; + locations->SetOut(calling_convention.GetReturnLocation(load->GetType())); + } else { + locations->SetOut(Location::RequiresRegister()); } - locations->SetOut(Location::RequiresRegister()); } void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) { @@ -4347,10 +4211,10 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) { } // TODO: Re-add the compiler code to do string dex cache lookup again. - SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load); - codegen_->AddSlowPath(slow_path); - __ B(slow_path->GetEntryLabel()); - __ Bind(slow_path->GetExitLabel()); + InvokeRuntimeCallingConvention calling_convention; + __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex()); + codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc()); + CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); } void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) { diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index 4689ccb05c..b3b648f137 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -5238,7 +5238,6 @@ void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) { } if (needs_write_barrier) { // Temporary registers for the write barrier. - // These registers may be used for Baker read barriers too. locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too. // Ensure the card is in a byte register. locations->AddTemp(Location::RegisterLocation(ECX)); @@ -5328,105 +5327,40 @@ void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) { __ Bind(¬_null); } - if (kEmitCompilerReadBarrier) { - if (!kUseBakerReadBarrier) { - // When (non-Baker) read barriers are enabled, the type - // checking instrumentation requires two read barriers - // generated by CodeGeneratorX86::GenerateReadBarrierSlow: - // - // __ movl(temp2, temp); - // // /* HeapReference<Class> */ temp = temp->component_type_ - // __ movl(temp, Address(temp, component_offset)); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp_loc, temp_loc, temp2_loc, component_offset); - // - // // /* HeapReference<Class> */ temp2 = register_value->klass_ - // __ movl(temp2, Address(register_value, class_offset)); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc); - // - // __ cmpl(temp, temp2); - // - // However, the second read barrier may trash `temp`, as it - // is a temporary register, and as such would not be saved - // along with live registers before calling the runtime (nor - // restored afterwards). So in this case, we bail out and - // delegate the work to the array set slow path. - // - // TODO: Extend the register allocator to support a new - // "(locally) live temp" location so as to avoid always - // going into the slow path when read barriers are enabled? - // - // There is no such problem with Baker read barriers (see below). 
- __ jmp(slow_path->GetEntryLabel()); - } else { - Location temp2_loc = locations->GetTemp(1); - Register temp2 = temp2_loc.AsRegister<Register>(); - // /* HeapReference<Class> */ temp = array->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier( - instruction, temp_loc, array, class_offset, /* needs_null_check */ true); - - // /* HeapReference<Class> */ temp = temp->component_type_ - codegen_->GenerateFieldLoadWithBakerReadBarrier( - instruction, temp_loc, temp, component_offset, /* needs_null_check */ false); - // Register `temp` is not trashed by the read barrier - // emitted by GenerateFieldLoadWithBakerReadBarrier below, - // as that method produces a call to a ReadBarrierMarkRegX - // entry point, which saves all potentially live registers, - // including temporaries such a `temp`. - // /* HeapReference<Class> */ temp2 = register_value->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier( - instruction, temp2_loc, register_value, class_offset, /* needs_null_check */ false); - // If heap poisoning is enabled, `temp` and `temp2` have - // been unpoisoned by the the previous calls to - // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier. - __ cmpl(temp, temp2); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - __ j(kEqual, &do_put); - // We do not need to emit a read barrier for the - // following heap reference load, as `temp` is only used - // in a comparison with null below, and this reference - // is not kept afterwards. Also, if heap poisoning is - // enabled, there is no need to unpoison that heap - // reference for the same reason (comparison with null). - __ cmpl(Address(temp, super_offset), Immediate(0)); - __ j(kNotEqual, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ j(kNotEqual, slow_path->GetEntryLabel()); - } - } - } else { - // Non read barrier code. + // Note that when Baker read barriers are enabled, the type + // checks are performed without read barriers. This is fine, + // even in the case where a class object is in the from-space + // after the flip, as a comparison involving such a type would + // not produce a false positive; it may of course produce a + // false negative, in which case we would take the ArraySet + // slow path. - // /* HeapReference<Class> */ temp = array->klass_ - __ movl(temp, Address(array, class_offset)); - codegen_->MaybeRecordImplicitNullCheck(instruction); + // /* HeapReference<Class> */ temp = array->klass_ + __ movl(temp, Address(array, class_offset)); + codegen_->MaybeRecordImplicitNullCheck(instruction); + __ MaybeUnpoisonHeapReference(temp); + + // /* HeapReference<Class> */ temp = temp->component_type_ + __ movl(temp, Address(temp, component_offset)); + // If heap poisoning is enabled, no need to unpoison `temp` + // nor the object reference in `register_value->klass`, as + // we are comparing two poisoned references. + __ cmpl(temp, Address(register_value, class_offset)); + + if (instruction->StaticTypeOfArrayIsObjectArray()) { + __ j(kEqual, &do_put); + // If heap poisoning is enabled, the `temp` reference has + // not been unpoisoned yet; unpoison it now. __ MaybeUnpoisonHeapReference(temp); - // /* HeapReference<Class> */ temp = temp->component_type_ - __ movl(temp, Address(temp, component_offset)); - // If heap poisoning is enabled, no need to unpoison `temp` - // nor the object reference in `register_value->klass`, as - // we are comparing two poisoned references. 
- __ cmpl(temp, Address(register_value, class_offset)); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - __ j(kEqual, &do_put); - // If heap poisoning is enabled, the `temp` reference has - // not been unpoisoned yet; unpoison it now. - __ MaybeUnpoisonHeapReference(temp); - - // If heap poisoning is enabled, no need to unpoison the - // heap reference loaded below, as it is only used for a - // comparison with null. - __ cmpl(Address(temp, super_offset), Immediate(0)); - __ j(kNotEqual, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ j(kNotEqual, slow_path->GetEntryLabel()); - } + // If heap poisoning is enabled, no need to unpoison the + // heap reference loaded below, as it is only used for a + // comparison with null. + __ cmpl(Address(temp, super_offset), Immediate(0)); + __ j(kNotEqual, slow_path->GetEntryLabel()); + __ Bind(&do_put); + } else { + __ j(kNotEqual, slow_path->GetEntryLabel()); } } diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index a21a09ee8a..b3228f8b4e 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -4732,7 +4732,6 @@ void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) { if (needs_write_barrier) { // Temporary registers for the write barrier. - // These registers may be used for Baker read barriers too. locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too. locations->AddTemp(Location::RequiresRegister()); } @@ -4822,105 +4821,40 @@ void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) { __ Bind(¬_null); } - if (kEmitCompilerReadBarrier) { - if (!kUseBakerReadBarrier) { - // When (non-Baker) read barriers are enabled, the type - // checking instrumentation requires two read barriers - // generated by CodeGeneratorX86_64::GenerateReadBarrierSlow: - // - // __ movl(temp2, temp); - // // /* HeapReference<Class> */ temp = temp->component_type_ - // __ movl(temp, Address(temp, component_offset)); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp_loc, temp_loc, temp2_loc, component_offset); - // - // // /* HeapReference<Class> */ temp2 = register_value->klass_ - // __ movl(temp2, Address(register_value, class_offset)); - // codegen_->GenerateReadBarrierSlow( - // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc); - // - // __ cmpl(temp, temp2); - // - // However, the second read barrier may trash `temp`, as it - // is a temporary register, and as such would not be saved - // along with live registers before calling the runtime (nor - // restored afterwards). So in this case, we bail out and - // delegate the work to the array set slow path. - // - // TODO: Extend the register allocator to support a new - // "(locally) live temp" location so as to avoid always - // going into the slow path when read barriers are enabled? - // - // There is no such problem with Baker read barriers (see below). 
- __ jmp(slow_path->GetEntryLabel()); - } else { - Location temp2_loc = locations->GetTemp(1); - CpuRegister temp2 = temp2_loc.AsRegister<CpuRegister>(); - // /* HeapReference<Class> */ temp = array->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier( - instruction, temp_loc, array, class_offset, /* needs_null_check */ true); - - // /* HeapReference<Class> */ temp = temp->component_type_ - codegen_->GenerateFieldLoadWithBakerReadBarrier( - instruction, temp_loc, temp, component_offset, /* needs_null_check */ false); - // Register `temp` is not trashed by the read barrier - // emitted by GenerateFieldLoadWithBakerReadBarrier below, - // as that method produces a call to a ReadBarrierMarkRegX - // entry point, which saves all potentially live registers, - // including temporaries such a `temp`. - // /* HeapReference<Class> */ temp2 = register_value->klass_ - codegen_->GenerateFieldLoadWithBakerReadBarrier( - instruction, temp2_loc, register_value, class_offset, /* needs_null_check */ false); - // If heap poisoning is enabled, `temp` and `temp2` have - // been unpoisoned by the the previous calls to - // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier. - __ cmpl(temp, temp2); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - __ j(kEqual, &do_put); - // We do not need to emit a read barrier for the - // following heap reference load, as `temp` is only used - // in a comparison with null below, and this reference - // is not kept afterwards. Also, if heap poisoning is - // enabled, there is no need to unpoison that heap - // reference for the same reason (comparison with null). - __ cmpl(Address(temp, super_offset), Immediate(0)); - __ j(kNotEqual, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ j(kNotEqual, slow_path->GetEntryLabel()); - } - } - } else { - // Non read barrier code. + // Note that when Baker read barriers are enabled, the type + // checks are performed without read barriers. This is fine, + // even in the case where a class object is in the from-space + // after the flip, as a comparison involving such a type would + // not produce a false positive; it may of course produce a + // false negative, in which case we would take the ArraySet + // slow path. - // /* HeapReference<Class> */ temp = array->klass_ - __ movl(temp, Address(array, class_offset)); - codegen_->MaybeRecordImplicitNullCheck(instruction); + // /* HeapReference<Class> */ temp = array->klass_ + __ movl(temp, Address(array, class_offset)); + codegen_->MaybeRecordImplicitNullCheck(instruction); + __ MaybeUnpoisonHeapReference(temp); + + // /* HeapReference<Class> */ temp = temp->component_type_ + __ movl(temp, Address(temp, component_offset)); + // If heap poisoning is enabled, no need to unpoison `temp` + // nor the object reference in `register_value->klass`, as + // we are comparing two poisoned references. + __ cmpl(temp, Address(register_value, class_offset)); + + if (instruction->StaticTypeOfArrayIsObjectArray()) { + __ j(kEqual, &do_put); + // If heap poisoning is enabled, the `temp` reference has + // not been unpoisoned yet; unpoison it now. __ MaybeUnpoisonHeapReference(temp); - // /* HeapReference<Class> */ temp = temp->component_type_ - __ movl(temp, Address(temp, component_offset)); - // If heap poisoning is enabled, no need to unpoison `temp` - // nor the object reference in `register_value->klass`, as - // we are comparing two poisoned references. 
- __ cmpl(temp, Address(register_value, class_offset)); - - if (instruction->StaticTypeOfArrayIsObjectArray()) { - __ j(kEqual, &do_put); - // If heap poisoning is enabled, the `temp` reference has - // not been unpoisoned yet; unpoison it now. - __ MaybeUnpoisonHeapReference(temp); - - // If heap poisoning is enabled, no need to unpoison the - // heap reference loaded below, as it is only used for a - // comparison with null. - __ cmpl(Address(temp, super_offset), Immediate(0)); - __ j(kNotEqual, slow_path->GetEntryLabel()); - __ Bind(&do_put); - } else { - __ j(kNotEqual, slow_path->GetEntryLabel()); - } + // If heap poisoning is enabled, no need to unpoison the + // heap reference loaded below, as it is only used for a + // comparison with null. + __ cmpl(Address(temp, super_offset), Immediate(0)); + __ j(kNotEqual, slow_path->GetEntryLabel()); + __ Bind(&do_put); + } else { + __ j(kNotEqual, slow_path->GetEntryLabel()); } } diff --git a/test/538-checker-embed-constants/src/Main.java b/test/538-checker-embed-constants/src/Main.java index f6713a2962..04a12fa212 100644 --- a/test/538-checker-embed-constants/src/Main.java +++ b/test/538-checker-embed-constants/src/Main.java @@ -102,12 +102,12 @@ public class Main { /// CHECK-START-ARM: long Main.and255(long) disassembly (after) /// CHECK-NOT: movs {{r\d+}}, #255 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} /// CHECK-DAG: and {{r\d+}}, {{r\d+}}, #255 /// CHECK-DAG: movs {{r\d+}}, #0 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} public static long and255(long arg) { return arg & 255L; @@ -115,12 +115,13 @@ public class Main { /// CHECK-START-ARM: long Main.and511(long) disassembly (after) /// CHECK: movw {{r\d+}}, #511 - /// CHECK-NOT: and - /// CHECK-NOT: bic - /// CHECK-DAG: and{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} - /// CHECK-DAG: movs {{r\d+}}, #0 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NEXT: movs {{r\d+}}, #0 + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} + /// CHECK: and{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} + /// CHECK-NEXT: and{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} public static long and511(long arg) { return arg & 511L; @@ -128,11 +129,11 @@ public class Main { /// CHECK-START-ARM: long Main.andNot15(long) disassembly (after) /// CHECK-NOT: mvn {{r\d+}}, #15 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} /// CHECK: bic {{r\d+}}, {{r\d+}}, #15 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} public static long andNot15(long arg) { return arg & ~15L; @@ -141,12 +142,12 @@ public class Main { /// CHECK-START-ARM: long Main.and0xfffffff00000000f(long) disassembly (after) /// CHECK-NOT: movs {{r\d+}}, #15 /// CHECK-NOT: mvn {{r\d+}}, #15 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} /// CHECK-DAG: and {{r\d+}}, {{r\d+}}, #15 /// CHECK-DAG: bic {{r\d+}}, {{r\d+}}, #15 - /// CHECK-NOT: and - /// CHECK-NOT: bic + /// CHECK-NOT: and{{(\.w)?}} + /// CHECK-NOT: bic{{(\.w)?}} public static long and0xfffffff00000000f(long arg) { return arg & 0xfffffff00000000fL; @@ -154,10 +155,10 @@ public class Main { /// CHECK-START-ARM: long Main.or255(long) disassembly (after) /// CHECK-NOT: movs {{r\d+}}, #255 - /// CHECK-NOT: orr + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn 
/// CHECK: orr {{r\d+}}, {{r\d+}}, #255 - /// CHECK-NOT: orr + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn public static long or255(long arg) { @@ -166,10 +167,12 @@ public class Main { /// CHECK-START-ARM: long Main.or511(long) disassembly (after) /// CHECK: movw {{r\d+}}, #511 - /// CHECK-NOT: orr + /// CHECK-NEXT: movs {{r\d+}}, #0 + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn /// CHECK: orr{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} - /// CHECK-NOT: orr + /// CHECK-NEXT: orr{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn public static long or511(long arg) { @@ -178,11 +181,11 @@ public class Main { /// CHECK-START-ARM: long Main.orNot15(long) disassembly (after) /// CHECK-NOT: mvn {{r\d+}}, #15 - /// CHECK-NOT: orr + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn /// CHECK-DAG: orn {{r\d+}}, {{r\d+}}, #15 /// CHECK-DAG: mvn {{r\d+}}, #0 - /// CHECK-NOT: orr + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn public static long orNot15(long arg) { @@ -192,11 +195,11 @@ public class Main { /// CHECK-START-ARM: long Main.or0xfffffff00000000f(long) disassembly (after) /// CHECK-NOT: movs {{r\d+}}, #15 /// CHECK-NOT: mvn {{r\d+}}, #15 - /// CHECK-NOT: orr + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn /// CHECK-DAG: orr {{r\d+}}, {{r\d+}}, #15 /// CHECK-DAG: orn {{r\d+}}, {{r\d+}}, #15 - /// CHECK-NOT: orr + /// CHECK-NOT: orr{{(\.w)?}} /// CHECK-NOT: orn public static long or0xfffffff00000000f(long arg) { @@ -205,9 +208,9 @@ public class Main { /// CHECK-START-ARM: long Main.xor255(long) disassembly (after) /// CHECK-NOT: movs {{r\d+}}, #255 - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} /// CHECK: eor {{r\d+}}, {{r\d+}}, #255 - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} public static long xor255(long arg) { return arg ^ 255L; @@ -215,9 +218,11 @@ public class Main { /// CHECK-START-ARM: long Main.xor511(long) disassembly (after) /// CHECK: movw {{r\d+}}, #511 - /// CHECK-NOT: eor - /// CHECK-DAG: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} - /// CHECK-NOT: eor + /// CHECK-NEXT: movs {{r\d+}}, #0 + /// CHECK-NOT: eor{{(\.w)?}} + /// CHECK: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} + /// CHECK-NEXT: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} + /// CHECK-NOT: eor{{(\.w)?}} public static long xor511(long arg) { return arg ^ 511L; @@ -226,10 +231,10 @@ public class Main { /// CHECK-START-ARM: long Main.xorNot15(long) disassembly (after) /// CHECK-DAG: mvn {{r\d+}}, #15 /// CHECK-DAG: mov.w {{r\d+}}, #-1 - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} /// CHECK-DAG: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} /// CHECK-DAG: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} public static long xorNot15(long arg) { return arg ^ ~15L; @@ -239,10 +244,10 @@ public class Main { /// CHECK-START-ARM: long Main.xor0xfffffff00000000f(long) disassembly (after) /// CHECK-DAG: movs {{r\d+}}, #15 /// CHECK-DAG: mvn {{r\d+}}, #15 - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} /// CHECK-DAG: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} /// CHECK-DAG: eor{{(\.w)?}} {{r\d+}}, {{r\d+}}, {{r\d+}} - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} public static long xor0xfffffff00000000f(long arg) { return arg ^ 0xfffffff00000000fL; @@ -251,10 +256,10 @@ public class Main { /// CHECK-START-ARM: long Main.xor0xf00000000000000f(long) disassembly (after) /// CHECK-NOT: movs {{r\d+}}, #15 /// CHECK-NOT: mov.w {{r\d+}}, #-268435456 - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} /// CHECK-DAG: eor 
{{r\d+}}, {{r\d+}}, #15 /// CHECK-DAG: eor {{r\d+}}, {{r\d+}}, #-268435456 - /// CHECK-NOT: eor + /// CHECK-NOT: eor{{(\.w)?}} public static long xor0xf00000000000000f(long arg) { return arg ^ 0xf00000000000000fL; |
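
The comments added in each back end argue that the ArraySet type check may compare class references loaded without read barriers: a stale from-space reference after the flip can make two copies of the same class compare unequal (a false negative that merely sends the store to the ArraySet slow path), but it can never make two distinct classes compare equal. Below is a minimal, self-contained C++ sketch of the decision the rewritten fast path encodes; the Toy* types and FastPathStoreOk are illustrative stand-ins, not ART's mirror types, and heap poisoning, null checks and the write barrier are ignored.

#include <cstdio>

struct ToyClass {
  const ToyClass* component_type;  // non-null only for array classes
  const ToyClass* super_class;     // direct superclass; null for java.lang.Object
};

struct ToyObject {
  const ToyClass* klass;
};

// Mirrors the fast-path decision emitted by VisitArraySet: compare the array's
// component type against the stored value's class, and for a statically known
// Object[] additionally accept a component type whose superclass is null
// (i.e. Object itself). Anything else falls back to the ArraySet slow path.
bool FastPathStoreOk(const ToyObject& array, const ToyObject& value,
                     bool static_type_is_object_array) {
  const ToyClass* component = array.klass->component_type;  // temp1 in the codegen
  const ToyClass* value_class = value.klass;                // temp2 in the codegen
  if (component == value_class) {
    return true;  // exact component-type match
  }
  if (static_type_is_object_array) {
    return component->super_class == nullptr;  // component is Object: any reference fits
  }
  return false;  // delegate to the slow path
}

int main() {
  ToyClass object_class{nullptr, nullptr};
  ToyClass string_class{nullptr, &object_class};
  ToyClass object_array_class{&object_class, &object_class};
  ToyClass string_array_class{&string_class, &object_class};

  ToyObject object_array{&object_array_class};
  ToyObject string_array{&string_array_class};
  ToyObject a_string{&string_class};

  // String into Object[]: classes differ, but Object has no superclass,
  // so the store still takes the fast path.
  std::printf("%d\n", FastPathStoreOk(object_array, a_string, true));   // 1
  // String into String[]: exact component-type match.
  std::printf("%d\n", FastPathStoreOk(string_array, a_string, false));  // 1
  return 0;
}

Both calls print 1: the first exercises the super_class-is-null fallback emitted under StaticTypeOfArrayIsObjectArray(), the second the exact component-type comparison that every back end performs before branching to the slow path.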