    gen_write_ref_array_pre_barrier(masm, decorators, dst, count, saved_regs);
  }
}
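
// Arraycopy post-barrier: after an oop arraycopy has completed, dirty every
// card spanned by the destination range so a later GC rescans those cards for
// references into the young generation.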
void CardTableBarrierSetAssembler::arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
                                                      Register start, Register count, Register tmp) {
  if (is_oop) {
    gen_write_ref_array_post_barrier(masm, decorators, start, count, tmp);
  }
}
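
// Dispatch for stores: reference stores take the card-marking path in
// oop_store_at(); primitive stores go straight to the shared
// BarrierSetAssembler implementation.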
void CardTableBarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                            Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
  if (is_reference_type(type)) {
    oop_store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);
  } else {
    BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);
  }
}
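
// Dirty the card table entry covering the address held in obj: the object base
// for an imprecise mark, or the exact field/element address for a precise one.
// obj is clobbered; rscratch is used to hold the card table base.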
void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register obj, Address dst, Register rscratch) {
  precond(rscratch != noreg);
  BarrierSet* bs = BarrierSet::barrier_set();
  assert(bs->kind() == BarrierSet::CardTableBarrierSet, "Wrong barrier set kind");
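
  // The card index is the address shifted right by CardTable::card_shift();
  // a dirty card is a zero byte, so storing zr marks the card dirty.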
  __ lsr(obj, obj, CardTable::card_shift());

  assert(CardTable::dirty_card_val() == 0, "must be");

  __ load_byte_map_base(rscratch);
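
  // With UseCondCardMark, read the card first and skip the store when it is
  // already dirty, avoiding redundant writes to frequently written cards.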
  if (UseCondCardMark) {
    precond(rscratch != rscratch2);
    Label L_already_dirty;
    __ ldrb(rscratch2, Address(obj, rscratch));
    __ cbz(rscratch2, L_already_dirty);
    __ strb(zr, Address(obj, rscratch));
    __ bind(L_already_dirty);
  } else {
    __ strb(zr, Address(obj, rscratch));
  }
}
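
// Range post-barrier used after oop arraycopy: dirty every card from the first
// to the last (inclusive) covered by 'count' heap oops starting at 'start'.
// Clobbers start, count and scratch.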
void CardTableBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
                                                                    Register start, Register count, Register scratch) {
  Label L_loop, L_done;
  const Register end = count;

  __ cbz(count, L_done); // zero count - nothing to do
  __ lea(end, Address(start, count, Address::lsl(LogBytesPerHeapOop))); // end = start + count << LogBytesPerHeapOop
  __ sub(end, end, BytesPerHeapOop); // last element address, to make the range inclusive
  __ lsr(start, start, CardTable::card_shift());
  __ lsr(end, end, CardTable::card_shift());
  __ sub(count, end, start); // number of cards minus one (the loop below is inclusive)
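
  // start now holds the index of the first card and count the offset of the
  // last card relative to it; after adding the byte map base, the loop walks
  // the range backwards, writing a zero (dirty) byte for each card.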
  __ load_byte_map_base(scratch);
  __ add(start, start, scratch);
  __ bind(L_loop);
  __ strb(zr, Address(start, count));
  __ subs(count, count, 1);
  __ br(Assembler::GE, L_loop);
  __ bind(L_done);
}
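
// Reference store with card-marking post-barrier. Array and unknown-oop stores
// must mark precisely (the card of the element/field actually written); other
// heap stores may simply mark the card of the object header.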
void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
                                                Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
  bool in_heap = (decorators & IN_HEAP) != 0;
  bool is_array = (decorators & IS_ARRAY) != 0;
  bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
  bool precise = is_array || on_anonymous;
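
  // A null store (val == noreg) cannot create a new old-to-young reference,
  // so only non-null stores into the heap need the post-barrier.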
  bool needs_post_barrier = val != noreg && in_heap;
  BarrierSetAssembler::store_at(masm, decorators, type, dst, val, noreg, noreg, noreg);
  if (needs_post_barrier) {
    // flatten object address if needed
    if (!precise || (dst.index() == noreg && dst.offset() == 0)) {
      store_check(masm, dst.base(), dst, tmp2);
    } else {
      __ lea(tmp3, dst);
      store_check(masm, tmp3, dst, tmp2);
    }
  }
}