src/hotspot/cpu/aarch64/gc/shared/cardTableBarrierSetAssembler_aarch64.cpp

Old version:

 92   __ lsr(end, end, CardTable::card_shift());
 93   __ sub(count, end, start); // number of bytes to copy
 94 
 95   __ load_byte_map_base(scratch);
 96   __ add(start, start, scratch);
 97   __ bind(L_loop);
 98   __ strb(zr, Address(start, count));
 99   __ subs(count, count, 1);
100   __ br(Assembler::GE, L_loop);
101   __ bind(L_done);
102 }
103 
104 void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
105                                                 Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
106   bool in_heap = (decorators & IN_HEAP) != 0;
107   bool is_array = (decorators & IS_ARRAY) != 0;
108   bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
109   bool precise = is_array || on_anonymous;
110 
111   bool needs_post_barrier = val != noreg && in_heap;

112   BarrierSetAssembler::store_at(masm, decorators, type, dst, val, noreg, noreg, noreg);
113   if (needs_post_barrier) {
114     // flatten object address if needed
115     if (!precise || (dst.index() == noreg && dst.offset() == 0)) {
116       store_check(masm, dst.base(), dst);
117     } else {
118       __ lea(tmp3, dst);
119       store_check(masm, tmp3, dst);
120     }
121   }
122 }
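
The card-zeroing loop at file lines 92-101, shown at the top of both listings and unchanged by this patch, stores zero into one card-table byte for every card spanned by the updated range of references; zero is taken here to be the dirty-card value, matching the strb of zr above. A rough C++ sketch of the computation follows, using illustrative names (dirty_cards_for_range, byte_map_base, card_shift) that are not HotSpot API:

#include <cstddef>
#include <cstdint>

// Sketch only: mirrors the register-level steps in the excerpt above.
static void dirty_cards_for_range(volatile unsigned char* byte_map_base,
                                  uintptr_t start_addr, uintptr_t last_addr,
                                  unsigned card_shift) {
  uintptr_t start_card = start_addr >> card_shift;            // start is presumably shifted like this just above the excerpt
  uintptr_t last_card  = last_addr  >> card_shift;            // line 92: lsr(end, end, card_shift)
  ptrdiff_t count = (ptrdiff_t)(last_card - start_card);      // line 93: sub(count, end, start)
  volatile unsigned char* base = byte_map_base + start_card;  // lines 95-96: load_byte_map_base + add
  do {
    base[count] = 0;                                          // line 98: strb(zr, Address(start, count))
  } while (--count >= 0);                                     // lines 99-100: subs + br(GE, L_loop)
}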

New version:

 92   __ lsr(end, end, CardTable::card_shift());
 93   __ sub(count, end, start); // number of bytes to copy
 94 
 95   __ load_byte_map_base(scratch);
 96   __ add(start, start, scratch);
 97   __ bind(L_loop);
 98   __ strb(zr, Address(start, count));
 99   __ subs(count, count, 1);
100   __ br(Assembler::GE, L_loop);
101   __ bind(L_done);
102 }
103 
104 void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
105                                                 Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
106   bool in_heap = (decorators & IN_HEAP) != 0;
107   bool is_array = (decorators & IS_ARRAY) != 0;
108   bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
109   bool precise = is_array || on_anonymous;
110 
111   bool needs_post_barrier = val != noreg && in_heap;
112 
113   BarrierSetAssembler::store_at(masm, decorators, type, dst, val, noreg, noreg, noreg);
114   if (needs_post_barrier) {
115     // flatten object address if needed
116     if (!precise || (dst.index() == noreg && dst.offset() == 0)) {
117       if (tmp3 != noreg) {
118         // TODO 8366717 This change is from before the 'tmp3' arg was added to mainline, check if it's still needed. Same on x64. Also, this should be a __ lea
119         // Called by MacroAssembler::pack_inline_helper. We cannot corrupt the dst.base() register
120         __ mov(tmp3, dst.base());
121         store_check(masm, tmp3, dst);
122       } else {
123         // It's OK to corrupt the dst.base() register.
124         store_check(masm, dst.base(), dst);
125       }
126     } else {
127       __ lea(tmp3, dst);
128       store_check(masm, tmp3, dst);
129     }
130   }
131 }
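
store_check itself is not part of this excerpt, but the post-barrier it implements needs to dirty the card covering one address: dst.base() when marking is imprecise (dirtying the object's own card is sufficient), or the exact field/element address when marking is precise (arrays and unknown oop refs), which is why the else branch forms that address with lea. The new tmp3 branch differs from the old line 116 only in copying dst.base() into tmp3 before passing it on, so a caller that still needs the base register does not see it clobbered. A hedged sketch of the overall effect, using assumed names rather than HotSpot API:

#include <cstdint>

// Illustrative helper, not HotSpot code: dirty the card that covers 'addr'.
static inline void dirty_card_for(volatile unsigned char* byte_map_base,
                                  uintptr_t addr, unsigned card_shift) {
  byte_map_base[addr >> card_shift] = 0;   // 0 assumed to be the dirty value, as with strb zr above
}

// C-like outline of the post-barrier path in the new oop_store_at:
//
//   store_at(dst, val);                        // the reference store itself
//   if (val != noreg && in_heap) {             // needs_post_barrier
//     if (!precise || dst is just a base) {
//       addr = dst.base();                     // imprecise: the object's card; copied into tmp3
//                                              // first when the caller must keep base intact
//     } else {
//       addr = effective_address_of(dst);      // precise: lea of the element/field address
//     }
//     dirty_card_for(byte_map_base, addr, card_shift);
//   }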