 */

#include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
#include "gc/shenandoah/mode/shenandoahMode.hpp"
#include "gc/shenandoah/shenandoahBarrierSet.hpp"
#include "gc/shenandoah/shenandoahBarrierSetAssembler.hpp"
#include "gc/shenandoah/shenandoahForwarding.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahRuntime.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "interpreter/interp_masm.hpp"
#include "interpreter/interpreter.hpp"
#include "runtime/javaThread.hpp"
#include "runtime/sharedRuntime.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "gc/shenandoah/c1/shenandoahBarrierSetC1.hpp"
#endif
#ifdef COMPILER2
#include "gc/shenandoah/c2/shenandoahBarrierSetC2.hpp"
#include "opto/output.hpp"
#include "utilities/population_count.hpp"
#include "utilities/powerOfTwo.hpp"
#endif

#define __ masm->

void ShenandoahBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
                                                       Register src, Register dst, Register count, RegSet saved_regs) {
  if (is_oop) {
    bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0;
    if ((ShenandoahSATBBarrier && !dest_uninitialized) || ShenandoahLoadRefBarrier) {

      Label done;

      // Avoid calling runtime if count == 0
      __ beqz(count, done);

      // Is GC active?
      Address gc_state(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
      assert_different_registers(src, dst, count, t0);

      assert(!saved_regs.contains(t0), "Sanity: about to clobber t0");

// ... (intervening code elided in this excerpt) ...

      target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak_narrow);
    } else {
      target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak);
    }
  } else {
    assert(is_phantom, "only remaining strength");
    assert(is_native, "phantom must only be called off-heap");
    target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom);
  }
  __ rt_call(target);
  __ mv(t0, x10);
  __ pop_call_clobbered_registers();
  __ mv(x10, t0);

  __ epilogue();
}

#undef __

#endif // COMPILER1

#ifdef COMPILER2

#undef __
#define __ masm.

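// All general-purpose registers are in principle available to barrier stubs;
// the ones that must stay untouched are filtered out by is_special_register() below.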
int ShenandoahBarrierStubC2::available_gp_registers() {
  return Register::number_of_registers;
}

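// Registers barrier stubs must leave alone: the frame and stack pointers,
// the compressed-oop heap base, the current-thread register, the scratch
// registers t0/t1, and the hardwired zero register.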
bool ShenandoahBarrierStubC2::is_special_register(Register r) {
  return r == fp || r == sp ||
         r == xheapbase || r == xthread ||
         r == t0 || r == t1 || r == zr;
}

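// Fast-path check emitted at the barrier site: load the per-thread gc-state
// fast-test byte for test_state, enter the out-of-line stub only when it is
// non-zero, and fall through to the continuation otherwise.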
void ShenandoahBarrierStubC2::enter_if_gc_state(MacroAssembler& masm, const char test_state, Register tmp) {
  Assembler::InlineSkippedInstructionsCounter skip_counter(&masm);

  Address gc_state_fast(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_array_offset(test_state)));
  __ lbu(t0, gc_state_fast);
  __ beqz(t0, *continuation());
  __ j(*entry());

  // This is where the slow-path stub returns to, and where the code above
  // jumps to when the state check fails.
  __ bind(*continuation());
}

#undef __
#define __ masm->

void ShenandoahBarrierSetAssembler::compare_and_set_c2(const MachNode* node, MacroAssembler* masm, Register res, Register addr,
                                                       Register oldval, Register newval, Register tmp, bool exchange, bool narrow, bool is_acquire) {
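  // Map the node's ordering requirement onto RISC-V aq/rl bits: the release
  // bit is always set for the CAS sequence, acquire only when requested.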
  const Assembler::Aqrl acquire = is_acquire ? Assembler::aq : Assembler::relaxed;
  const Assembler::Aqrl release = Assembler::rl;

  // Pre-barrier covers several things:
  // a. Avoids false positives from CAS encountering to-space memory values.
  // b. Satisfies the need for LRB for the CAE result.
  // c. Records old value for the sake of SATB.
  //
  // (a) and (b) are covered because load barrier does memory location fixup.
  // (c) is covered by KA on the current memory value.
  if (ShenandoahBarrierStubC2::needs_slow_barrier(node)) {
    ShenandoahBarrierStubC2* const stub = ShenandoahBarrierStubC2::create(node, tmp, Address(addr, 0), narrow, /* do_load: */ true);
    char check = 0;
    check |= ShenandoahBarrierStubC2::needs_keep_alive_barrier(node) ? ShenandoahHeap::MARKING : 0;
    check |= ShenandoahBarrierStubC2::needs_load_ref_barrier(node) ? ShenandoahHeap::HAS_FORWARDED : 0;
    assert(!ShenandoahBarrierStubC2::needs_load_ref_barrier_weak(node), "Not supported for CAS/CAE");
    stub->enter_if_gc_state(*masm, check);
  }

  // Existing RISCV cmpxchg_oop already handles Shenandoah forwarded-value retry logic.
  // It returns:
  //  - boolean 0/1 for CAS (!exchange)
  //  - loaded/current value for CAE (exchange)
  ShenandoahBarrierSet::assembler()->cmpxchg_oop(masm, addr, oldval, newval, acquire, release, exchange /* is_cae */, res);

  // Post-barrier deals with card updates.
  card_barrier_c2(node, masm, Address(addr, 0));
}

void ShenandoahBarrierSetAssembler::get_and_set_c2(const MachNode* node, MacroAssembler* masm, Register preval,
                                                   Register newval, Register addr, Register tmp, bool is_acquire) {
  const bool is_narrow = node->bottom_type()->isa_narrowoop();

  // Pre-barrier covers several things:
  // a. Satisfies the need for LRB for the GAS result.
  // b. Records old value for the sake of SATB.
  //
  // (a) is covered because load barrier does memory location fixup.
  // (b) is covered by KA on the current memory value.
  if (ShenandoahBarrierStubC2::needs_slow_barrier(node)) {
    ShenandoahBarrierStubC2* const stub = ShenandoahBarrierStubC2::create(node, tmp, Address(addr, 0), is_narrow, /* do_load: */ true);
    char check = 0;
    check |= ShenandoahBarrierStubC2::needs_keep_alive_barrier(node) ? ShenandoahHeap::MARKING : 0;
    check |= ShenandoahBarrierStubC2::needs_load_ref_barrier(node) ? ShenandoahHeap::HAS_FORWARDED : 0;
    assert(!ShenandoahBarrierStubC2::needs_load_ref_barrier_weak(node), "Not supported for GAS");
    stub->enter_if_gc_state(*masm, check);
  }

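  // Pick the AMO swap variant: the wu forms operate on 32-bit narrow oops
  // (zero-extending), the al forms add acquire ordering.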
  if (is_narrow) {
    if (is_acquire) {
      __ atomic_xchgalwu(preval, newval, addr);
    } else {
      __ atomic_xchgwu(preval, newval, addr);
    }
  } else {
    if (is_acquire) {
      __ atomic_xchgal(preval, newval, addr);
    } else {
      __ atomic_xchg(preval, newval, addr);
    }
  }

  // Post-barrier deals with card updates.
  card_barrier_c2(node, masm, Address(addr, 0));
}

void ShenandoahBarrierSetAssembler::store_c2(const MachNode* node, MacroAssembler* masm, Address dst, bool dst_narrow,
                                             Register src, bool src_narrow, Register tmp) {

  // Pre-barrier: SATB / keep-alive on current value in memory.
  if (ShenandoahBarrierStubC2::needs_slow_barrier(node)) {
    assert(!ShenandoahBarrierStubC2::needs_load_ref_barrier(node), "Should not be required for stores");
    ShenandoahBarrierStubC2* const stub = ShenandoahBarrierStubC2::create(node, tmp, dst, dst_narrow, /* do_load: */ true);
    stub->enter_if_gc_state(*masm, ShenandoahHeap::MARKING);
  }

  // Do the actual store
  if (dst_narrow) {
    if (!src_narrow) {
      // Need to encode into tmp, because we cannot clobber src.
      assert(tmp != noreg, "need temp register");
      if (ShenandoahBarrierStubC2::maybe_null(node)) {
        __ encode_heap_oop(tmp, src);
      } else {
        __ encode_heap_oop_not_null(tmp, src);
      }
      src = tmp;
    }
    __ sw(src, dst);
  } else {
    __ sd(src, dst);
  }

  // Post-barrier: card updates.
  card_barrier_c2(node, masm, dst);
}

void ShenandoahBarrierSetAssembler::load_c2(const MachNode* node, MacroAssembler* masm, Register dst, Address src, bool is_narrow) {
  // Do the actual load. This load is the candidate for implicit null check, and MUST come first.
  if (is_narrow) {
    __ lwu(dst, src);
  } else {
    __ ld(dst, src);
  }

  // Post-barrier: LRB / KA / weak-root processing.
  if (ShenandoahBarrierStubC2::needs_slow_barrier(node)) {
    ShenandoahBarrierStubC2* const stub = ShenandoahBarrierStubC2::create(node, dst, src, is_narrow, /* do_load: */ false);
    char check = 0;
    check |= ShenandoahBarrierStubC2::needs_keep_alive_barrier(node) ? ShenandoahHeap::MARKING : 0;
    check |= ShenandoahBarrierStubC2::needs_load_ref_barrier(node) ? ShenandoahHeap::HAS_FORWARDED : 0;
    check |= ShenandoahBarrierStubC2::needs_load_ref_barrier_weak(node) ? ShenandoahHeap::WEAK_ROOTS : 0;
    stub->enter_if_gc_state(*masm, check);
  }
}

void ShenandoahBarrierSetAssembler::card_barrier_c2(const MachNode* node, MacroAssembler* masm, Address address) {
  if (!ShenandoahBarrierStubC2::needs_card_barrier(node)) {
    return;
  }

  assert(CardTable::dirty_card_val() == 0, "must be");
  Assembler::InlineSkippedInstructionsCounter skip_counter(masm);

  // t0 = card table base (holder)
  Address curr_ct_holder_addr(xthread, in_bytes(ShenandoahThreadLocalData::card_table_offset()));
  __ ld(t0, curr_ct_holder_addr);

  // t1 = effective address
  __ la(t1, address);

  // t1 = &card_table[addr >> CardTable::card_shift()] ; card index
  __ srli(t1, t1, CardTable::card_shift());
  __ add(t1, t1, t0);

  if (UseCondCardMark) {
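    // Conditional card mark: read the card first and skip the store when it
    // is already dirty (dirty_card_val() == 0), saving write traffic.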
    Label L_already_dirty;
    __ lbu(t0, Address(t1));
    __ beqz(t0, L_already_dirty);
    __ sb(zr, Address(t1));
    __ bind(L_already_dirty);
  } else {
    __ sb(zr, Address(t1));
  }
}

#undef __
#define __ masm.

void ShenandoahBarrierStubC2::post_init() {
  // If we are in scratch emit mode we assume worst case,
  // and force the use of trampolines
  PhaseOutput* const output = Compile::current()->output();
  if (output->in_scratch_emit_size()) {
    return;
  }
}

void ShenandoahBarrierStubC2::emit_code(MacroAssembler& masm) {
  Assembler::InlineSkippedInstructionsCounter skip_counter(&masm);
  assert(_needs_keep_alive_barrier || _needs_load_ref_barrier, "Why are you here?");

  __ bind(*entry());

  // If we need to load ourselves, do it here.
  if (_do_load) {
    if (_narrow) {
      __ lwu(_obj, _addr);
    } else {
      __ ld(_obj, _addr);
    }
  }

  // If the object is null, there is no point in applying barriers.
  maybe_far_jump_if_zero(masm, _obj, continuation());

  // Go for barriers. Barriers can return straight to continuation, as long
  // as another barrier is not needed and we can reach the fastpath.
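  // When both barriers are needed, the keep-alive part gets a null exit
  // label so it falls straight through into the LRB below.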
  if (_needs_keep_alive_barrier && _needs_load_ref_barrier) {
    keepalive(masm, nullptr);
    lrb(masm, continuation());
  } else if (_needs_keep_alive_barrier) {
    keepalive(masm, continuation());
  } else if (_needs_load_ref_barrier) {
    lrb(masm, continuation());
  } else {
    ShouldNotReachHere();
  }
}

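// RISC-V conditional branches reach only about +/-4 KiB, while an
// unconditional jump reaches about +/-1 MiB. Invert the condition locally
// and use j so a distant L_done stays reachable.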
void ShenandoahBarrierStubC2::maybe_far_jump_if_zero(MacroAssembler& masm, Register reg, Label* L_done) {
  Label L_short_jump;
  __ bnez(reg, L_short_jump);
  __ j(*L_done);
  __ bind(L_short_jump);
}

void ShenandoahBarrierStubC2::keepalive(MacroAssembler& masm, Label* L_done) {
  Address index(xthread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
  Address buffer(xthread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset()));

  Label L_through, L_slowpath;

  Register tmp1 = t0;
  Register tmp2 = t1;
  assert_different_registers(tmp1, tmp2, _obj, _addr.base(), _addr.index());

  // If another barrier is enabled as well, do a runtime check for a specific barrier.
  if (_needs_load_ref_barrier) {
    Address gc_state_fast(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_array_offset(ShenandoahHeap::MARKING)));
    __ lbu(tmp1, gc_state_fast);
    if (L_done != nullptr) {
      maybe_far_jump_if_zero(masm, tmp1, L_done);
    } else {
      __ beqz(tmp1, L_through);
    }
  }

  // Fast-path: put object into buffer.
  // If buffer is already full, go slow.
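  // The SATB queue index is a byte offset counting down towards zero;
  // zero means the buffer has no free slot left.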
  __ ld(tmp1, index);
  __ beqz(tmp1, L_slowpath);
  __ subi(tmp1, tmp1, wordSize);
  __ sd(tmp1, index);
  __ ld(tmp2, buffer);

  // If object is narrow, we need to unpack it before inserting into buffer.
  __ add(tmp1, tmp1, tmp2);
  if (_narrow) {
    __ decode_heap_oop_not_null(tmp2, _obj);
    __ sd(tmp2, Address(tmp1));
  } else {
    __ sd(_obj, Address(tmp1));
  }

  // Fast-path exits here.
  if (L_done != nullptr) {
    __ j(*L_done);
  } else {
    __ j(L_through);
  }

  // Slow-path: call runtime to handle.
  __ bind(L_slowpath);

  // If this stub also supports LRB then we need to preserve _obj to use it there.
  if (_needs_load_ref_barrier) {
    preserve(_obj);
  } else {
    dont_preserve(_obj);
  }

  {
    SaveLiveRegisters slr(&masm, this);

    // Go to runtime and handle the rest there.
    __ mv(c_rarg0, _obj);
    __ rt_call(keepalive_runtime_entry_addr());
  }

  if (L_done != nullptr) {
    __ j(*L_done);
  } else {
    __ bind(L_through);
  }
}

void ShenandoahBarrierStubC2::lrb(MacroAssembler& masm, Label* L_done) {
  assert(L_done != nullptr, "Must be set");

  Label L_slow;

  Register tmp1 = t0;
  Register tmp2 = t1;
  assert_different_registers(tmp1, tmp2, _obj, _addr.base(), _addr.index());

  // If another barrier is enabled as well, do a runtime check for a specific barrier.
  if (_needs_keep_alive_barrier) {
    char state_to_check = ShenandoahHeap::HAS_FORWARDED | (_needs_load_ref_weak_barrier ? ShenandoahHeap::WEAK_ROOTS : 0);
    Address gc_state_fast(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_array_offset(state_to_check)));
    __ lbu(tmp1, gc_state_fast);
    maybe_far_jump_if_zero(masm, tmp1, L_done);
  }

  // If weak references are being processed, weak/phantom loads need to go slow,
  // regardless of their cset status.
  if (_needs_load_ref_weak_barrier) {
    Address gc_state_fast(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_array_offset(ShenandoahHeap::WEAK_ROOTS)));
    __ lbu(tmp1, gc_state_fast);
    __ bnez(tmp1, L_slow);
  }

  // Cset-check. Fall-through to slow if in collection set.
  if (_narrow) {
    __ decode_heap_oop_not_null(tmp2, _obj);
  } else {
    __ mv(tmp2, _obj);
  }

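  // Index the in-cset fast-test table by region number: one byte per heap
  // region, non-zero when that region is in the collection set.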
  __ mv(tmp1, ShenandoahHeap::in_cset_fast_test_addr());
  __ srli(tmp2, tmp2, ShenandoahHeapRegion::region_size_bytes_shift_jint());
  __ add(tmp1, tmp1, tmp2);
  __ lbu(tmp1, Address(tmp1, 0));
  maybe_far_jump_if_zero(masm, tmp1, L_done);

  // Slow path
  __ bind(L_slow);

  // Obj is the result, need to temporarily stop preserving it.
  dont_preserve(_obj);
  {
    SaveLiveRegisters slr(&masm, this);

    // Shuffle in the arguments. The end result should be:
    //   c_rarg0 <- obj
    //   c_rarg1 <- lea(addr)
    if (c_rarg0 == _obj) {
      __ la(c_rarg1, _addr);
    } else if (c_rarg1 == _obj) {
      // Set up the address argument in c_rarg0 first, then swap the two
      // registers via t0.
      __ la(c_rarg0, _addr);
      __ mv(t0, c_rarg0);
      __ mv(c_rarg0, c_rarg1);
      __ mv(c_rarg1, t0);
    } else {
      assert_different_registers(c_rarg1, _obj);
      __ la(c_rarg1, _addr);
      __ mv(c_rarg0, _obj);
    }

    // Go to runtime and handle the rest there.
    __ rt_call(lrb_runtime_entry_addr());

    // Save the result where needed.
    if (_narrow) {
      __ zext_w(_obj, x10);
    } else {
      __ mv(_obj, x10);
    }
  }
  preserve(_obj);

  __ j(*L_done);
}

#endif // COMPILER2