< prev index next >

src/hotspot/cpu/riscv/gc/shenandoah/shenandoahBarrierSetAssembler_riscv.cpp

Print this page

 853       }
 854     }
 855   }
 856 
 857   // If we could not find a non-live register, select the live fallback:
 858   if (tmp == noreg) {
 859     tmp = fallback_live;
 860     selected_live = true;
 861   } else {
 862     selected_live = false;
 863   }
 864 
 865   assert(tmp != noreg, "successfully selected");
 866   assert_different_registers(tmp, reg1);
 867   assert_different_registers(tmp, addr.base());
 868   assert_different_registers(tmp, addr.index());
 869   return tmp;
 870 }
 871 
 872 void ShenandoahBarrierStubC2::enter_if_gc_state(MacroAssembler& masm, const char test_state) {
       // Emit the fast-path gc-state check for this C2 barrier stub: load the
       // per-thread gc-state byte, test the single bit that corresponds to
       // 'test_state', and branch to the stub's slow-path entry only when
       // that bit is set.  When it is clear, execution falls straight through
       // to the continuation label (no barrier work needed).
 873   int bit_to_check = ShenandoahThreadLocalData::gc_state_to_fast_bit(test_state);
 874   Address gc_state_fast(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_offset()));
       // Byte-sized load of the thread-local fast gc-state; t0 is scratch here.
 875   __ lbu(t0, gc_state_fast);
 876   __ test_bit(t0, t0, bit_to_check);
       // Non-zero result => the tested gc state is active: enter the stub.
 877   __ bnez(t0, *entry());
 878 
 879   // Fast path falls through here when the barrier is not needed.




 880   __ bind(*continuation());
 881 }
 882 













 883 #undef __
 884 #define __ masm->
 885 
 886 void ShenandoahBarrierSetAssembler::compare_and_set_c2(const MachNode* node, MacroAssembler* masm, Register res, Register addr,
 887     Register oldval, Register newval, Register tmp, bool exchange, bool maybe_null, bool narrow, bool weak) {
 888   const Assembler::Aqrl acquire = needs_acquiring_load_reserved(node) ? Assembler::aq : Assembler::relaxed;
 889   const Assembler::Aqrl release = Assembler::rl;
 890 
 891   // Pre-barrier covers several things:
 892   //  a. Avoids false positives from CAS encountering to-space memory values.
 893   //  b. Satisfies the need for LRB for the CAE result.
 894   //  c. Records old value for the sake of SATB.
 895   //
 896   // (a) and (b) are covered because load barrier does memory location fixup.
 897   // (c) is covered by KA on the current memory value.
 898   if (ShenandoahBarrierStubC2::needs_slow_barrier(node)) {
 899     ShenandoahBarrierStubC2* const stub = ShenandoahBarrierStubC2::create(node, tmp, Address(addr, 0), narrow, /* do_load: */ true);
 900     char check = 0;
 901     check |= ShenandoahBarrierStubC2::needs_keep_alive_barrier(node) ? ShenandoahHeap::MARKING : 0;
 902     check |= ShenandoahBarrierStubC2::needs_load_ref_barrier(node)   ? ShenandoahHeap::HAS_FORWARDED : 0;

1210       __ mv(c_rarg1, t0);
1211     } else {
1212       assert_different_registers(c_rarg1, obj);
1213       __ la(c_rarg1, addr);
1214       __ mv(c_rarg0, obj);
1215     }
1216 
1217     // Get address of runtime LRB entry and call it
1218     __ rt_call(lrb_runtime_entry_addr());
1219 
1220     // If we loaded the object in the stub it means we don't need to return it
1221     // to fastpath, so no need to make this mov.
1222     if (!_do_load) {
1223       __ mv(obj, x10);
1224     }
1225   }
1226 
1227   __ bind(L_done);
1228 }
1229 
1230 void ShenandoahBarrierStubC2::post_init(int offset) {
       // Intentionally a no-op in this version: there is no post-initialization
       // work to do for the stub on this platform.  'offset' is accepted only
       // for interface compatibility with other platforms and is ignored here.
1231   // Do nothing.
1232 }
1233 #endif // COMPILER2

 853       }
 854     }
 855   }
 856 
 857   // If we could not find a non-live register, select the live fallback:
 858   if (tmp == noreg) {
 859     tmp = fallback_live;
 860     selected_live = true;
 861   } else {
 862     selected_live = false;
 863   }
 864 
 865   assert(tmp != noreg, "successfully selected");
 866   assert_different_registers(tmp, reg1);
 867   assert_different_registers(tmp, addr.base());
 868   assert_different_registers(tmp, addr.index());
 869   return tmp;
 870 }
 871 
 872 void ShenandoahBarrierStubC2::enter_if_gc_state(MacroAssembler& masm, const char test_state) {
 873   // Emit the unconditional branch in the first version of the method.
 874   // Let the rest of runtime figure out how to manage it.
       // The barrier relocation marks this jump so it can be found (and,
       // presumably, later patched to/from a nop by the runtime -- see the
       // patch_branch_to_nop / patch_nop_to_branch hooks); 'test_state' is
       // not consulted at emission time in this version.
 875   __ relocate(barrier_Relocation::spec());
 876   __ j(*entry());

 877 
 878 #ifdef ASSERT
       // Debug-only sanity check reached once the branch above is nop-ed out:
       // if the thread-local gc state is zero (idle), skip to the continuation,
       // otherwise trap -- a nop-ed barrier with a non-idle heap is a bug.
       // NOTE(review): 'ld' reads 64 bits here while the conditional variant
       // of this check uses 'lbu' (one byte) on the same address -- confirm
       // the declared width of the gc_state_fast field before relying on this.
 879   Address gc_state_fast(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_offset()));
 880   __ ld(t0, gc_state_fast);
 881   __ beqz(t0, *continuation());
 882   __ illegal_instruction(Assembler::csr::time); // Correctness bug: barrier is NOP-ed, but heap is NOT IDLE
 883 #endif
 884   __ bind(*continuation());
 885 }
 886 
 887 address ShenandoahBarrierSetAssembler::parse_stub_address(address pc) {
       // Not implemented on RISC-V yet: Unimplemented() aborts the VM if this
       // is ever reached.  Presumably intended to recover the stub entry
       // address from the patchable barrier branch at 'pc' -- confirm against
       // the shared-code callers before implementing.
 888   Unimplemented();
 889   return nullptr;
 890 }
 891 
 892 void ShenandoahBarrierSetAssembler::patch_branch_to_nop(address pc) {
       // Not implemented on RISC-V yet; Unimplemented() aborts if reached.
       // Presumably meant to rewrite the relocated barrier branch at 'pc'
       // into a nop when the barrier is not needed -- TODO confirm contract
       // against the shared barrier-patching code.
 893   Unimplemented();
 894 }
 895 
 896 void ShenandoahBarrierSetAssembler::patch_nop_to_branch(address pc, address stub_addr) {
       // Not implemented on RISC-V yet; Unimplemented() aborts if reached.
       // Presumably the inverse of patch_branch_to_nop: restore the branch at
       // 'pc' so it targets the stub at 'stub_addr' -- TODO confirm contract
       // against the shared barrier-patching code.
 897   Unimplemented();
 898 }
 899 
 900 #undef __
 901 #define __ masm->
 902 
 903 void ShenandoahBarrierSetAssembler::compare_and_set_c2(const MachNode* node, MacroAssembler* masm, Register res, Register addr,
 904     Register oldval, Register newval, Register tmp, bool exchange, bool maybe_null, bool narrow, bool weak) {
 905   const Assembler::Aqrl acquire = needs_acquiring_load_reserved(node) ? Assembler::aq : Assembler::relaxed;
 906   const Assembler::Aqrl release = Assembler::rl;
 907 
 908   // Pre-barrier covers several things:
 909   //  a. Avoids false positives from CAS encountering to-space memory values.
 910   //  b. Satisfies the need for LRB for the CAE result.
 911   //  c. Records old value for the sake of SATB.
 912   //
 913   // (a) and (b) are covered because load barrier does memory location fixup.
 914   // (c) is covered by KA on the current memory value.
 915   if (ShenandoahBarrierStubC2::needs_slow_barrier(node)) {
 916     ShenandoahBarrierStubC2* const stub = ShenandoahBarrierStubC2::create(node, tmp, Address(addr, 0), narrow, /* do_load: */ true);
 917     char check = 0;
 918     check |= ShenandoahBarrierStubC2::needs_keep_alive_barrier(node) ? ShenandoahHeap::MARKING : 0;
 919     check |= ShenandoahBarrierStubC2::needs_load_ref_barrier(node)   ? ShenandoahHeap::HAS_FORWARDED : 0;

1227       __ mv(c_rarg1, t0);
1228     } else {
1229       assert_different_registers(c_rarg1, obj);
1230       __ la(c_rarg1, addr);
1231       __ mv(c_rarg0, obj);
1232     }
1233 
1234     // Get address of runtime LRB entry and call it
1235     __ rt_call(lrb_runtime_entry_addr());
1236 
1237     // If we loaded the object in the stub it means we don't need to return it
1238     // to fastpath, so no need to make this mov.
1239     if (!_do_load) {
1240       __ mv(obj, x10);
1241     }
1242   }
1243 
1244   __ bind(L_done);
1245 }
1246 



1247 #endif // COMPILER2
< prev index next >