
src/hotspot/cpu/aarch64/gc/shared/cardTableBarrierSetAssembler_aarch64.cpp


 91   __ lsr(end, end, CardTable::card_shift());
 92   __ sub(count, end, start); // number of bytes to copy
 93 
 94   __ load_byte_map_base(scratch);
 95   __ add(start, start, scratch);
 96   __ bind(L_loop);
 97   __ strb(zr, Address(start, count));
 98   __ subs(count, count, 1);
 99   __ br(Assembler::GE, L_loop);
100   __ bind(L_done);
101 }
102 
103 void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
104                                                 Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
105   bool in_heap = (decorators & IN_HEAP) != 0;
106   bool is_array = (decorators & IS_ARRAY) != 0;
107   bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
108   bool precise = is_array || on_anonymous;
109 
110   bool needs_post_barrier = val != noreg && in_heap;
111   BarrierSetAssembler::store_at(masm, decorators, type, dst, val, noreg, noreg, noreg);
112   if (needs_post_barrier) {
113     // flatten object address if needed
114     if (!precise || (dst.index() == noreg && dst.offset() == 0)) {
115       store_check(masm, dst.base(), dst);
116     } else {
117       __ lea(tmp3, dst);
118       store_check(masm, tmp3, dst);
119     }
120   }
121 }
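
For reference, the count-driven card-marking loop above (lines 91-100) amounts to the
following plain C++ sketch, assuming the usual HotSpot card-table layout: one byte per
2^card_shift heap bytes, a biased byte_map_base so that byte_map_base + (addr >> card_shift)
addresses the card covering addr, and dirty_card_val() == 0 (which is why strb stores zr).
The function and parameter names below are illustrative, not part of the patch.

    #include <cstdint>

    static void dirty_cards_for_range(uint8_t*  byte_map_base, // biased card-table base (load_byte_map_base)
                                      uintptr_t range_start,   // first heap address written
                                      uintptr_t range_last,    // last heap address written, inclusive
                                      unsigned  card_shift) {  // CardTable::card_shift()
      uintptr_t start_card = range_start >> card_shift;  // lsr(start, start, card_shift)
      uintptr_t last_card  = range_last  >> card_shift;  // lsr(end, end, card_shift)
      intptr_t  count      = last_card - start_card;     // sub(count, end, start)
      uint8_t*  first      = byte_map_base + start_card; // add(start, start, scratch)

      // strb(zr, Address(start, count)) / subs / br(GE): walk from the last card
      // back to the first, storing 0 (dirty) into each card-table byte.
      for (; count >= 0; count--) {
        first[count] = 0;
      }
    }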

 91   __ lsr(end, end, CardTable::card_shift());
 92   __ sub(count, end, start); // number of bytes to copy
 93 
 94   __ load_byte_map_base(scratch);
 95   __ add(start, start, scratch);
 96   __ bind(L_loop);
 97   __ strb(zr, Address(start, count));
 98   __ subs(count, count, 1);
 99   __ br(Assembler::GE, L_loop);
100   __ bind(L_done);
101 }
102 
103 void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
104                                                 Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
105   bool in_heap = (decorators & IN_HEAP) != 0;
106   bool is_array = (decorators & IS_ARRAY) != 0;
107   bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
108   bool precise = is_array || on_anonymous;
109 
110   bool needs_post_barrier = val != noreg && in_heap;
111 
112   BarrierSetAssembler::store_at(masm, decorators, type, dst, val, noreg, noreg, noreg);
113   if (needs_post_barrier) {
114     // flatten object address if needed
115     if (!precise || (dst.index() == noreg && dst.offset() == 0)) {
116       if (tmp3 != noreg) {
117         // TODO 8366717 This change is from before the 'tmp3' arg was added to mainline, check if it's still needed. Same on x64. Also, this should be a __ lea
118         // Called by MacroAssembler::pack_inline_helper. We cannot corrupt the dst.base() register
119         __ mov(tmp3, dst.base());
120         store_check(masm, tmp3, dst);
121       } else {
122         // It's OK to corrupt the dst.base() register.
123         store_check(masm, dst.base(), dst);
124       }
125     } else {
126       __ lea(tmp3, dst);
127       store_check(masm, tmp3, dst);
128     }
129   }
130 }
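
The per-store barrier in oop_store_at is the standard precise/imprecise card-marking split;
the new tmp3 branch only changes which register carries the address into store_check, since
store_check may clobber that register (the comment notes dst.base() must not be corrupted
for the MacroAssembler::pack_inline_helper caller). A minimal C++ sketch of the logic being
generated, with illustrative names and the same card-table assumptions as the sketch above:

    #include <cstdint>

    static inline void dirty_card(uint8_t* byte_map_base, uintptr_t addr, unsigned card_shift) {
      byte_map_base[addr >> card_shift] = 0; // what store_check boils down to: 0 == dirty_card_val()
    }

    static void oop_store_post_barrier(uint8_t* byte_map_base, unsigned card_shift,
                                       uintptr_t obj_base,   // dst.base(): start of the holding object
                                       uintptr_t field_addr, // lea(tmp3, dst): exact slot written
                                       bool precise) {       // is_array || on_anonymous
      if (!precise) {
        // Imprecise: dirty the card of the object header; the GC rescans the whole
        // object found on that card, so the exact field address is not needed.
        dirty_card(byte_map_base, obj_base, card_shift);
      } else {
        // Precise: arrays (and unknown oop refs) can span many cards, so the card
        // of the exact element/field written is dirtied.
        dirty_card(byte_map_base, field_addr, card_shift);
      }
    }

In the generated code the two !precise branches differ only in register use: when tmp3 is
available, the base address is copied into it first so that dst.base() is not clobbered by
store_check, matching the requirement noted in the TODO.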
< prev index next >