/*
 * Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "c1/c1_Defs.hpp"
#include "c1/c1_LIRGenerator.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/c1/barrierSetC1.hpp"
#include "utilities/macros.hpp"

#ifndef PATCHED_ADDR
#define PATCHED_ADDR  (max_jint)
#endif

#ifdef ASSERT
#define __ gen->lir(__FILE__, __LINE__)->
#else
#define __ gen->lir()->
#endif

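// Resolve the address of the access. Array accesses and field accesses
// that don't need patching are expressed directly as a LIR_Address;
// accesses that need patching use the placeholder offset (PATCHED_ADDR)
// so the patching code can locate the instruction. If resolve_in_register
// is set, the address is additionally materialized into a fresh pointer
// register with leal.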
LIR_Opr BarrierSetC1::resolve_address(LIRAccess& access, bool resolve_in_register) {
  DecoratorSet decorators = access.decorators();
  bool is_array = (decorators & IS_ARRAY) != 0;
  bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;

  LIRItem& base = access.base().item();
  LIR_Opr offset = access.offset().opr();
  LIRGenerator *gen = access.gen();

  LIR_Opr addr_opr;
  if (is_array) {
    addr_opr = LIR_OprFact::address(gen->emit_array_address(base.result(), offset, access.type()));
  } else if (needs_patching) {
    // We need to patch the offset in the instruction, so don't allow
    // generate_address to try to be smart about emitting the
    // placeholder offset. Otherwise the patching code won't know how
    // to find the instruction to patch.
    addr_opr = LIR_OprFact::address(new LIR_Address(base.result(), PATCHED_ADDR, access.type()));
  } else {
    addr_opr = LIR_OprFact::address(gen->generate_address(base.result(), offset, 0, 0, access.type()));
  }

  if (resolve_in_register) {
    LIR_Opr resolved_addr = gen->new_pointer_register();
    if (needs_patching) {
      __ leal(addr_opr, resolved_addr, lir_patch_normal, access.patch_emit_info());
      access.clear_decorators(C1_NEEDS_PATCHING);
    } else {
      __ leal(addr_opr, resolved_addr);
    }
    return LIR_OprFact::address(new LIR_Address(resolved_addr, access.type()));
  } else {
    return addr_opr;
  }
}

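// Entry point for stores with an unresolved address: resolve the address
// and dispatch to store_at_resolved(), which GC-specific subclasses
// override to wrap the store with write barriers.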
void BarrierSetC1::store_at(LIRAccess& access, LIR_Opr value) {
  DecoratorSet decorators = access.decorators();
  bool in_heap = (decorators & IN_HEAP) != 0;
  assert(in_heap, "not supported yet");

  LIR_Opr resolved = resolve_address(access, false);
  access.set_resolved_addr(resolved);
  store_at_resolved(access, value);
}

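// Entry point for loads with an unresolved address: resolve the address
// and dispatch to load_at_resolved(), where any GC-specific load barrier
// is applied.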
void BarrierSetC1::load_at(LIRAccess& access, LIR_Opr result) {
  DecoratorSet decorators = access.decorators();
  bool in_heap = (decorators & IN_HEAP) != 0;
  assert(in_heap, "not supported yet");

  LIR_Opr resolved = resolve_address(access, false);
  access.set_resolved_addr(resolved);
  load_at_resolved(access, result);
}

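// Load from an address that is already resolved (typically an IN_NATIVE
// access); heap accesses should use load_at() instead, as the assert notes.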
void BarrierSetC1::load(LIRAccess& access, LIR_Opr result) {
  DecoratorSet decorators = access.decorators();
  bool in_heap = (decorators & IN_HEAP) != 0;
  assert(!in_heap, "consider using load_at");
  load_at_resolved(access, result);
}

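// The atomic entry points (cmpxchg/xchg/add) force the base and offset to
// be loaded and resolve the address into a register, since the platform
// atomic instructions operate on a materialized address.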
LIR_Opr BarrierSetC1::atomic_cmpxchg_at(LIRAccess& access, LIRItem& cmp_value, LIRItem& new_value) {
  DecoratorSet decorators = access.decorators();
  bool in_heap = (decorators & IN_HEAP) != 0;
  assert(in_heap, "not supported yet");

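  // Make sure the base object and offset have been loaded before the
  // address is resolved below.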
  access.load_address();

  LIR_Opr resolved = resolve_address(access, true);
  access.set_resolved_addr(resolved);
  return atomic_cmpxchg_at_resolved(access, cmp_value, new_value);
}

LIR_Opr BarrierSetC1::atomic_xchg_at(LIRAccess& access, LIRItem& value) {
  DecoratorSet decorators = access.decorators();
  bool in_heap = (decorators & IN_HEAP) != 0;
  assert(in_heap, "not supported yet");

  access.load_address();

  LIR_Opr resolved = resolve_address(access, true);
  access.set_resolved_addr(resolved);
  return atomic_xchg_at_resolved(access, value);
}

LIR_Opr BarrierSetC1::atomic_add_at(LIRAccess& access, LIRItem& value) {
  DecoratorSet decorators = access.decorators();
  bool in_heap = (decorators & IN_HEAP) != 0;
  assert(in_heap, "not supported yet");

  access.load_address();

  LIR_Opr resolved = resolve_address(access, true);
  access.set_resolved_addr(resolved);
  return atomic_add_at_resolved(access, value);
}

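// Barrier-free store with memory ordering: a releasing membar precedes a
// volatile store, and a trailing full membar follows it unless the platform
// handles IRIW by fencing before volatile loads instead
// (support_IRIW_for_not_multiple_copy_atomic_cpu). Volatile stores that
// don't need patching go through the platform's volatile_field_store,
// which keeps 64-bit stores atomic where required.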
void BarrierSetC1::store_at_resolved(LIRAccess& access, LIR_Opr value) {
  DecoratorSet decorators = access.decorators();
  bool is_volatile = (((decorators & MO_SEQ_CST) != 0) || AlwaysAtomicAccesses);
  bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
  bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
  LIRGenerator* gen = access.gen();

  if (mask_boolean) {
    value = gen->mask_boolean(access.base().opr(), value, access.access_emit_info());
  }

  if (is_volatile) {
    __ membar_release();
  }

  LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
  if (is_volatile && !needs_patching) {
    gen->volatile_field_store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info());
  } else {
    __ store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info(), patch_code);
  }

  if (is_volatile && !support_IRIW_for_not_multiple_copy_atomic_cpu) {
    __ membar();
  }
}

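// Barrier-free load with memory ordering: on platforms that handle IRIW by
// fencing on the load side, a full membar precedes a volatile load; an
// acquiring membar always follows one.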
void BarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
  LIRGenerator *gen = access.gen();
  DecoratorSet decorators = access.decorators();
  bool is_volatile = (((decorators & MO_SEQ_CST) != 0) || AlwaysAtomicAccesses);
  bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
  bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
  bool in_native = (decorators & IN_NATIVE) != 0;

  if (support_IRIW_for_not_multiple_copy_atomic_cpu && is_volatile) {
    __ membar();
  }

  LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
  if (in_native) {
    __ move_wide(access.resolved_addr()->as_address_ptr(), result);
  } else if (is_volatile && !needs_patching) {
    gen->volatile_field_load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info());
  } else {
    __ load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info(), patch_code);
  }

  if (is_volatile) {
    __ membar_acquire();
  }

  /* Normalize the boolean value returned by an unsafe operation, i.e., result = (result != 0). */
  if (mask_boolean) {
    LabelObj* equalZeroLabel = new LabelObj();
    __ cmp(lir_cond_equal, result, 0);
    __ branch(lir_cond_equal, equalZeroLabel->label());
    __ move(LIR_OprFact::intConst(1), result);
    __ branch_destination(equalZeroLabel->label());
  }
}

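// The default resolved atomics carry no GC barriers; they delegate
// directly to the platform-specific LIRGenerator implementations.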
LIR_Opr BarrierSetC1::atomic_cmpxchg_at_resolved(LIRAccess& access, LIRItem& cmp_value, LIRItem& new_value) {
  LIRGenerator *gen = access.gen();
  return gen->atomic_cmpxchg(access.type(), access.resolved_addr(), cmp_value, new_value);
}

LIR_Opr BarrierSetC1::atomic_xchg_at_resolved(LIRAccess& access, LIRItem& value) {
  LIRGenerator *gen = access.gen();
  return gen->atomic_xchg(access.type(), access.resolved_addr(), value);
}

LIR_Opr BarrierSetC1::atomic_add_at_resolved(LIRAccess& access, LIRItem& value) {
  LIRGenerator *gen = access.gen();
  return gen->atomic_add(access.type(), access.resolved_addr(), value);
}

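// Emit the guards for an Unsafe read of Reference.referent: each check
// that can be decided statically is elided, and each remaining runtime
// check branches to cont (skipping the pre-barrier) when the access turns
// out not to be a referent load from a live Reference.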
void BarrierSetC1::generate_referent_check(LIRAccess& access, LabelObj* cont) {
  // We might be reading the value of the referent field of a
  // Reference object in order to attach it back to the live
  // object graph. If G1 is enabled then we need to record
  // the value that is being returned in an SATB log buffer.
  //
  // We need to generate code similar to the following...
  //
  // if (offset == java_lang_ref_Reference::referent_offset()) {
  //   if (src != NULL) {
  //     if (klass(src)->reference_type() != REF_NONE) {
  //       pre_barrier(..., value, ...);
  //     }
  //   }
  // }

  bool gen_pre_barrier = true;     // Assume we need to generate pre_barrier.
  bool gen_offset_check = true;    // Assume we need to generate the offset guard.
  bool gen_source_check = true;    // Assume we need to check the src object for null.
  bool gen_type_check = true;      // Assume we need to check the reference_type.

  LIRGenerator *gen = access.gen();

  LIRItem& base = access.base().item();
  LIR_Opr offset = access.offset().opr();

  if (offset->is_constant()) {
    LIR_Const* constant = offset->as_constant_ptr();
    jlong off_con = (constant->type() == T_INT ?
                     (jlong)constant->as_jint() :
                     constant->as_jlong());

    if (off_con != (jlong) java_lang_ref_Reference::referent_offset()) {
      // The constant offset is something other than referent_offset.
      // We can skip generating/checking the remaining guards and
      // skip generation of the code stub.
      gen_pre_barrier = false;
    } else {
      // The constant offset is the same as referent_offset -
      // we do not need to generate a runtime offset check.
      gen_offset_check = false;
    }
  }

  // We don't need to generate the stub if the source object is an array.
  if (gen_pre_barrier && base.type()->is_array()) {
    gen_pre_barrier = false;
  }

  if (gen_pre_barrier) {
    // We still need to continue with the checks.
    if (base.is_constant()) {
      ciObject* src_con = base.get_jobject_constant();
      guarantee(src_con != NULL, "no source constant");

      if (src_con->is_null_object()) {
        // The constant src object is null, so we can skip
        // generating the code stub.
        gen_pre_barrier = false;
      } else {
        // Non-null constant source object. We still have to generate
        // the slow stub, but we don't need to generate the runtime
        // null object check.
        gen_source_check = false;
      }
    }
  }
  if (gen_pre_barrier && !PatchALot) {
    // Can the klass of the object be statically determined to be
    // a sub-class of Reference?
    ciType* type = base.value()->declared_type();
    if ((type != NULL) && type->is_loaded()) {
      if (type->is_subtype_of(gen->compilation()->env()->Reference_klass())) {
        gen_type_check = false;
      } else if (type->is_klass() &&
                 !gen->compilation()->env()->Object_klass()->is_subtype_of(type->as_klass())) {
        // Not Reference and not Object klass.
        gen_pre_barrier = false;
      }
    }
  }

  if (gen_pre_barrier) {
    // We may need to generate more than one runtime check here. Let's
    // start with the offset check.
    // Allocate a temporary register for the base and load it here;
    // otherwise the control flow below may confuse the register
    // allocator.
    LIR_Opr base_reg = gen->new_register(T_OBJECT);
    __ move(base.result(), base_reg);
    if (gen_offset_check) {
      // if (offset != referent_offset) -> continue
      // If offset is an int then we can do the comparison with the
      // referent_offset constant; otherwise we need to move
      // referent_offset into a temporary register and generate
      // a reg-reg compare.

      LIR_Opr referent_off;

      if (offset->type() == T_INT) {
        referent_off = LIR_OprFact::intConst(java_lang_ref_Reference::referent_offset());
      } else {
        assert(offset->type() == T_LONG, "what else?");
        referent_off = gen->new_register(T_LONG);
        __ move(LIR_OprFact::longConst(java_lang_ref_Reference::referent_offset()), referent_off);
      }
      __ cmp(lir_cond_notEqual, offset, referent_off);
      __ branch(lir_cond_notEqual, cont->label());
    }
    if (gen_source_check) {
      // offset is a const and equals referent_offset
      // if (source == null) -> continue
      __ cmp(lir_cond_equal, base_reg, LIR_OprFact::oopConst(NULL));
      __ branch(lir_cond_equal, cont->label());
    }
    LIR_Opr src_klass = gen->new_register(T_METADATA);
    if (gen_type_check) {
      // We have determined that offset == referent_offset && src != null.
      // if (src->_klass->_reference_type == REF_NONE) -> continue
      gen->load_klass(base_reg, src_klass, NULL);
      LIR_Address* reference_type_addr = new LIR_Address(src_klass, in_bytes(InstanceKlass::reference_type_offset()), T_BYTE);
      LIR_Opr reference_type = gen->new_register(T_INT);
      __ move(reference_type_addr, reference_type);
      __ cmp(lir_cond_equal, reference_type, LIR_OprFact::intConst(REF_NONE));
      __ branch(lir_cond_equal, cont->label());
    }
  }
}