1 /*
2 * Copyright (c) 2017, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23
24 #ifndef SHARE_GC_Z_ZBARRIERSET_INLINE_HPP
25 #define SHARE_GC_Z_ZBARRIERSET_INLINE_HPP
26
27 #include "gc/z/zBarrierSet.hpp"
28
29 #include "gc/shared/accessBarrierSupport.inline.hpp"
30 #include "gc/z/zAddress.hpp"
31 #include "gc/z/zAddress.inline.hpp"
32 #include "gc/z/zHeap.hpp"
33 #include "gc/z/zNMethod.hpp"
34 #include "oops/inlineKlass.inline.hpp"
35 #include "oops/objArrayOop.hpp"
36 #include "utilities/copy.hpp"
37 #include "utilities/debug.hpp"
38 #include "utilities/globalDefinitions.hpp"
39
40 template <DecoratorSet decorators, typename BarrierSetT>
41 template <DecoratorSet expected>
42 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::verify_decorators_present() {
43 if ((decorators & expected) == 0) {
44 fatal("Using unsupported access decorators");
45 }
46 }
47
48 template <DecoratorSet decorators, typename BarrierSetT>
49 template <DecoratorSet expected>
50 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::verify_decorators_absent() {
51 if ((decorators & expected) != 0) {
52 fatal("Using unsupported access decorators");
53 }
54 }
55
template <DecoratorSet decorators, typename BarrierSetT>
inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::unsupported() {
  // Catch-all for access operations that ZGC's barrier set does not implement
  ShouldNotReachHere();
}
60
61 template <DecoratorSet decorators, typename BarrierSetT>
62 inline zpointer* ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::field_addr(oop base, ptrdiff_t offset) {
63 assert(base != nullptr, "Invalid base");
64 return reinterpret_cast<zpointer*>(reinterpret_cast<intptr_t>((void*)base) + offset);
65 }
66
67 template <DecoratorSet decorators, typename BarrierSetT>
68 inline zaddress ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::load_barrier(zpointer* p, zpointer o) {
69 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
70
71 if (HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
72 if (HasDecorator<decorators, ON_STRONG_OOP_REF>::value) {
73 // Load barriers on strong oop refs don't keep objects alive
74 return ZBarrierSet::load_barrier_on_oop_field_preloaded(p, o);
75 } else if (HasDecorator<decorators, ON_WEAK_OOP_REF>::value) {
76 return ZBarrierSet::no_keep_alive_load_barrier_on_weak_oop_field_preloaded(p, o);
77 } else {
78 assert((HasDecorator<decorators, ON_PHANTOM_OOP_REF>::value), "Must be");
79 return ZBarrierSet::no_keep_alive_load_barrier_on_phantom_oop_field_preloaded(p, o);
80 }
81 } else {
82 if (HasDecorator<decorators, ON_STRONG_OOP_REF>::value) {
83 return ZBarrierSet::load_barrier_on_oop_field_preloaded(p, o);
84 } else if (HasDecorator<decorators, ON_WEAK_OOP_REF>::value) {
85 return ZBarrierSet::load_barrier_on_weak_oop_field_preloaded(p, o);
86 } else {
87 assert((HasDecorator<decorators, ON_PHANTOM_OOP_REF>::value), "Must be");
88 return ZBarrierSet::load_barrier_on_phantom_oop_field_preloaded(p, o);
89 }
90 }
91 }
92
template <DecoratorSet decorators, typename BarrierSetT>
inline zaddress ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::load_barrier_on_unknown_oop_ref(oop base, ptrdiff_t offset, zpointer* p, zpointer o) {
  verify_decorators_present<ON_UNKNOWN_OOP_REF>();

  // The reference strength is not known at compile time (e.g. Unsafe access);
  // resolve it at runtime from the field's declaring class before dispatching
  // to the matching load barrier.
  const DecoratorSet decorators_known_strength =
    AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);

  if (HasDecorator<decorators, AS_NO_KEEPALIVE>::value) {
    if (decorators_known_strength & ON_STRONG_OOP_REF) {
      // Load barriers on strong oop refs don't keep objects alive
      return ZBarrierSet::load_barrier_on_oop_field_preloaded(p, o);
    } else if (decorators_known_strength & ON_WEAK_OOP_REF) {
      return ZBarrierSet::no_keep_alive_load_barrier_on_weak_oop_field_preloaded(p, o);
    } else {
      assert(decorators_known_strength & ON_PHANTOM_OOP_REF, "Must be");
      return ZBarrierSet::no_keep_alive_load_barrier_on_phantom_oop_field_preloaded(p, o);
    }
  } else {
    if (decorators_known_strength & ON_STRONG_OOP_REF) {
      return ZBarrierSet::load_barrier_on_oop_field_preloaded(p, o);
    } else if (decorators_known_strength & ON_WEAK_OOP_REF) {
      return ZBarrierSet::load_barrier_on_weak_oop_field_preloaded(p, o);
    } else {
      assert(decorators_known_strength & ON_PHANTOM_OOP_REF, "Must be");
      return ZBarrierSet::load_barrier_on_phantom_oop_field_preloaded(p, o);
    }
  }
}
121
122 inline zpointer ZBarrierSet::store_good(oop obj) {
123 assert(ZPointerStoreGoodMask != 0, "sanity");
124
125 const zaddress addr = to_zaddress(obj);
126 return ZAddress::store_good(addr);
127 }
128
129 template <DecoratorSet decorators, typename BarrierSetT>
130 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::store_barrier_heap_with_healing(zpointer* p) {
131 if (!HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value) {
132 ZBarrierSet::store_barrier_on_heap_oop_field(p, true /* heal */);
133 } else {
134 assert(false, "Should not be used on uninitialized memory");
135 }
136 }
137
138 template <DecoratorSet decorators, typename BarrierSetT>
139 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::store_barrier_heap_without_healing(zpointer* p) {
140 if (!HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value) {
141 ZBarrierSet::store_barrier_on_heap_oop_field(p, false /* heal */);
142 }
143 }
144
145 template <DecoratorSet decorators, typename BarrierSetT>
146 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::no_keep_alive_store_barrier_heap(zpointer* p) {
147 if (!HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value) {
148 ZBarrierSet::no_keep_alive_store_barrier_on_heap_oop_field(p);
149 }
150 }
151
152 template <DecoratorSet decorators, typename BarrierSetT>
153 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::store_barrier_native_with_healing(zpointer* p) {
154 if (!HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value) {
155 ZBarrierSet::store_barrier_on_native_oop_field(p, true /* heal */);
156 } else {
157 assert(false, "Should not be used on uninitialized memory");
158 }
159 }
160
161 template <DecoratorSet decorators, typename BarrierSetT>
162 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::store_barrier_native_without_healing(zpointer* p) {
163 if (!HasDecorator<decorators, IS_DEST_UNINITIALIZED>::value) {
164 ZBarrierSet::store_barrier_on_native_oop_field(p, false /* heal */);
165 }
166 }
167
168 //
169 // In heap
170 //
171 template <DecoratorSet decorators, typename BarrierSetT>
172 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap(zpointer* p) {
173 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
174
175 const zpointer o = Raw::load_in_heap(p);
176 assert_is_valid(o);
177
178 return to_oop(load_barrier(p, o));
179 }
180
181 template <DecoratorSet decorators, typename BarrierSetT>
182 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_in_heap_at(oop base, ptrdiff_t offset) {
183 zpointer* const p = field_addr(base, offset);
184
185 const zpointer o = Raw::load_in_heap(p);
186 assert_is_valid(o);
187
188 if (HasDecorator<decorators, ON_UNKNOWN_OOP_REF>::value) {
189 return to_oop(load_barrier_on_unknown_oop_ref(base, offset, p, o));
190 }
191
192 return to_oop(load_barrier(p, o));
193 }
194
195 template <DecoratorSet decorators>
196 bool is_store_barrier_no_keep_alive() {
197 if (HasDecorator<decorators, ON_STRONG_OOP_REF>::value) {
198 return HasDecorator<decorators, AS_NO_KEEPALIVE>::value;
199 }
200
201 if (HasDecorator<decorators, ON_WEAK_OOP_REF>::value) {
202 return true;
203 }
204
205 assert((decorators & ON_PHANTOM_OOP_REF) != 0, "Must be");
206 return true;
207 }
208
// Runtime variant: for ON_UNKNOWN_OOP_REF accesses the reference strength is
// resolved from the field's declaring class before deciding whether the store
// needs a keep-alive barrier.
template <DecoratorSet decorators>
inline bool is_store_barrier_no_keep_alive(oop base, ptrdiff_t offset) {
  if (!HasDecorator<decorators, ON_UNKNOWN_OOP_REF>::value) {
    // Strength is known at compile time; use the static variant
    return is_store_barrier_no_keep_alive<decorators>();
  }

  const DecoratorSet decorators_known_strength =
    AccessBarrierSupport::resolve_possibly_unknown_oop_ref_strength<decorators>(base, offset);

  if ((decorators_known_strength & ON_STRONG_OOP_REF) != 0) {
    // Strong refs keep objects alive unless AS_NO_KEEPALIVE was requested
    return (decorators & AS_NO_KEEPALIVE) != 0;
  }

  if ((decorators_known_strength & ON_WEAK_OOP_REF) != 0) {
    return true;
  }

  assert((decorators_known_strength & ON_PHANTOM_OOP_REF) != 0, "Must be");
  return true;
}
229
230 template <DecoratorSet decorators, typename BarrierSetT>
231 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap(zpointer* p, oop value) {
232 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
233
234 if (is_store_barrier_no_keep_alive<decorators>()) {
235 no_keep_alive_store_barrier_heap(p);
236 } else {
237 store_barrier_heap_without_healing(p);
238 }
239
240 Raw::store_in_heap(p, store_good(value));
241 }
242
243 template <DecoratorSet decorators, typename BarrierSetT>
244 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_in_heap_at(oop base, ptrdiff_t offset, oop value) {
245 zpointer* const p = field_addr(base, offset);
246
247 if (is_store_barrier_no_keep_alive<decorators>(base, offset)) {
248 no_keep_alive_store_barrier_heap(p);
249 } else {
250 store_barrier_heap_without_healing(p);
251 }
252
253 Raw::store_in_heap(p, store_good(value));
254 }
255
256 template <DecoratorSet decorators, typename BarrierSetT>
257 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_store_not_in_heap(zpointer* p, oop value) {
258 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
259
260 if (!is_store_barrier_no_keep_alive<decorators>()) {
261 store_barrier_native_without_healing(p);
262 }
263
264 Raw::store(p, store_good(value));
265 }
266
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap(zpointer* p, oop compare_value, oop new_value) {
  verify_decorators_present<ON_STRONG_OOP_REF>();
  verify_decorators_absent<AS_NO_KEEPALIVE>();

  // Heal the field before the CAS; both the compare and new values below are
  // colored store-good, so the field must hold a store-good pointer for the
  // comparison to succeed
  store_barrier_heap_with_healing(p);

  const zpointer o = Raw::atomic_cmpxchg_in_heap(p, store_good(compare_value), store_good(new_value));
  assert_is_valid(o);

  // The previous value is store-good after healing; strip the color
  return to_oop(ZPointer::uncolor_store_good(o));
}
279
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_in_heap_at(oop base, ptrdiff_t offset, oop compare_value, oop new_value) {
  verify_decorators_present<ON_STRONG_OOP_REF | ON_UNKNOWN_OOP_REF>();
  verify_decorators_absent<AS_NO_KEEPALIVE>();

  // Through Unsafe.CompareAndExchangeObject()/CompareAndSetObject() we can receive
  // calls with ON_UNKNOWN_OOP_REF set. However, we treat these as ON_STRONG_OOP_REF,
  // with the motivation that if you're doing Unsafe operations on a Reference.referent
  // field, then you're on your own anyway.
  zpointer* const p = field_addr(base, offset);

  // Heal the field before the CAS; both the compare and new values below are
  // colored store-good, so the field must hold a store-good pointer for the
  // comparison to succeed
  store_barrier_heap_with_healing(p);

  const zpointer o = Raw::atomic_cmpxchg_in_heap(p, store_good(compare_value), store_good(new_value));
  assert_is_valid(o);

  // The previous value is store-good after healing; strip the color
  return to_oop(ZPointer::uncolor_store_good(o));
}
298
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap(zpointer* p, oop new_value) {
  verify_decorators_present<ON_STRONG_OOP_REF>();
  verify_decorators_absent<AS_NO_KEEPALIVE>();

  // Heal the field before the exchange so the previous value read back below
  // is a store-good colored pointer
  store_barrier_heap_with_healing(p);

  const zpointer o = Raw::atomic_xchg_in_heap(p, store_good(new_value));
  assert_is_valid(o);

  // Strip the store-good color from the previous value
  return to_oop(ZPointer::uncolor_store_good(o));
}
311
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
  verify_decorators_present<ON_STRONG_OOP_REF>();
  verify_decorators_absent<AS_NO_KEEPALIVE>();

  zpointer* const p = field_addr(base, offset);

  // Heal the field before the exchange so the previous value read back below
  // is a store-good colored pointer
  store_barrier_heap_with_healing(p);

  const zpointer o = Raw::atomic_xchg_in_heap(p, store_good(new_value));
  assert_is_valid(o);

  // Strip the store-good color from the previous value
  return to_oop(ZPointer::uncolor_store_good(o));
}
326
template <DecoratorSet decorators, typename BarrierSetT>
inline zaddress ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one_barriers(zpointer* dst, zpointer* src) {
  // Pre-write barrier on the destination; no healing, since the caller
  // overwrites *dst explicitly afterwards
  store_barrier_heap_without_healing(dst);

  // Load barrier on the source element, returning the resolved address
  return ZBarrierSet::load_barrier_on_oop_field(src);
}
333
// Copies a single oop element from src to dst: barriers first, then the
// store-good colored pointer is written atomically.
template <DecoratorSet decorators, typename BarrierSetT>
inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one(zpointer* dst, zpointer* src) {
  const zaddress obj = oop_copy_one_barriers(dst, src);

  // ARRAYCOPY_NOTNULL copies must fail when encountering a null element
  if (HasDecorator<decorators, ARRAYCOPY_NOTNULL>::value && is_null(obj)) {
    return OopCopyResult::failed_check_null;
  }

  AtomicAccess::store(dst, ZAddress::store_good(obj));

  return OopCopyResult::ok;
}
346
// Clears a single oop element (stores a colored null), applying the store
// barrier first.
template <DecoratorSet decorators, typename BarrierSetT>
inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_clear_one(zpointer* dst) {
  if (HasDecorator<decorators, ARRAYCOPY_NOTNULL>::value) {
    // Clearing writes null, which a NOTNULL copy must reject
    return OopCopyResult::failed_check_null;
  }

  // Store barrier
  store_barrier_heap_without_healing(dst);

  // Store colored null
  AtomicAccess::store(dst, color_null());

  return OopCopyResult::ok;
}
361
// Copies a single oop element like oop_copy_one, but additionally verifies
// that the element is assignable to dst_klass before storing it.
template <DecoratorSet decorators, typename BarrierSetT>
inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one_check_cast(zpointer* dst, zpointer* src, Klass* dst_klass) {
  const zaddress obj = oop_copy_one_barriers(dst, src);

  // ARRAYCOPY_NOTNULL copies must fail when encountering a null element
  if (HasDecorator<decorators, ARRAYCOPY_NOTNULL>::value && is_null(obj)) {
    return OopCopyResult::failed_check_null;
  }

  if (!oopDesc::is_instanceof_or_null(to_oop(obj), dst_klass)) {
    // Check cast failed
    return OopCopyResult::failed_check_class_cast;
  }

  AtomicAccess::store(dst, ZAddress::store_good(obj));

  return OopCopyResult::ok;
}
379
380 template <DecoratorSet decorators, typename BarrierSetT>
381 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap_check_cast(zpointer* dst, zpointer* src, size_t length, Klass* dst_klass) {
382 // Check cast and copy each elements
383 for (const zpointer* const end = src + length; src < end; src++, dst++) {
384 const OopCopyResult result = oop_copy_one_check_cast(dst, src, dst_klass);
385 if (result != OopCopyResult::ok) {
386 return result;
387 }
388 }
389
390 return OopCopyResult::ok;
391 }
392
// Copies length oop elements from src to dst, choosing a copy direction that
// is safe if the two ranges overlap.
template <DecoratorSet decorators, typename BarrierSetT>
inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap_no_check_cast(zpointer* dst, zpointer* src, size_t length) {
  const bool is_disjoint = HasDecorator<decorators, ARRAYCOPY_DISJOINT>::value;

  // Forward copy is safe when the ranges are disjoint, or when the
  // destination starts before the source
  if (is_disjoint || src > dst) {
    for (const zpointer* const end = src + length; src < end; src++, dst++) {
      const OopCopyResult result = oop_copy_one(dst, src);
      if (result != OopCopyResult::ok) {
        return result;
      }
    }

    return OopCopyResult::ok;
  }

  // Destination starts after the source and the ranges may overlap;
  // copy backwards so elements are not clobbered before they are read
  if (src < dst) {
    const zpointer* const end = src;
    src += length - 1;
    dst += length - 1;
    for ( ; src >= end; src--, dst--) {
      const OopCopyResult result = oop_copy_one(dst, src);
      if (result != OopCopyResult::ok) {
        return result;
      }
    }

    return OopCopyResult::ok;
  }

  // src and dst are the same; nothing to do
  return OopCopyResult::ok;
}
425
426 template <DecoratorSet decorators, typename BarrierSetT>
427 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, zpointer* src_raw,
428 arrayOop dst_obj, size_t dst_offset_in_bytes, zpointer* dst_raw,
429 size_t length) {
430 zpointer* const src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
431 zpointer* const dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);
432
433 if (HasDecorator<decorators, ARRAYCOPY_CHECKCAST>::value) {
434 Klass* const dst_klass = objArrayOop(dst_obj)->element_klass();
435 return oop_arraycopy_in_heap_check_cast(dst, src, length, dst_klass);
436 } else {
437 return oop_arraycopy_in_heap_no_check_cast(dst, src, length);
438 }
439 }
440
// Clones src into the freshly allocated dst, making sure all oop fields in
// the clone are colored store-good before the clone is published.
template <DecoratorSet decorators, typename BarrierSetT>
inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
  check_is_valid_zaddress(src);

  if (dst->is_refArray()) {
    // Cloning an object array is similar to performing array copy.
    // If an array is large enough to have its allocation segmented,
    // this operation might require GC barriers. However, the intrinsics
    // for cloning arrays transform the clone to an optimized allocation
    // and arraycopy sequence, so the performance of this runtime call
    // does not matter for object arrays.
    clone_obj_array(objArrayOop(src), objArrayOop(dst));
    return;
  }

  // Fix the oops
  ZBarrierSet::load_barrier_all(src, size);

  // Clone the object
  Raw::clone_in_heap(src, dst, size);

  // Color store good before handing out
  ZBarrierSet::color_store_good_all(dst, size);
}
465
466 static inline void copy_primitive_payload(const void* src, const void* dst, const size_t payload_size_bytes, size_t& copied_bytes) {
467 if (payload_size_bytes == 0) {
468 return;
469 }
470 void* src_payload = (void*)(address(src) + copied_bytes);
471 void* dst_payload = (void*)(address(dst) + copied_bytes);
472 Copy::copy_value_content(src_payload, dst_payload, payload_size_bytes);
473 copied_bytes += payload_size_bytes;
474 }
475
476 static inline void clear_primitive_payload(const void* dst, const size_t payload_size_bytes, size_t& copied_bytes) {
477 if (payload_size_bytes == 0) {
478 return;
479 }
480
481 void* dst_payload = (void*)(address(dst) + copied_bytes);
482 Copy::fill_to_memory_atomic(dst_payload, payload_size_bytes);
483 copied_bytes += payload_size_bytes;
484 }
485
486 template <DecoratorSet decorators, typename BarrierSetT>
487 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::value_copy_in_heap(const ValuePayload& src, const ValuePayload& dst) {
488 precond(src.klass() == dst.klass());
489
490 const LayoutKind lk = LayoutKindHelper::get_copy_layout(src.layout_kind(), dst.layout_kind());
491 const InlineKlass* md = src.klass();
492 if (md->contains_oops()) {
493 assert(!LayoutKindHelper::is_atomic_flat(lk) ||
494 (md->nonstatic_oop_map_count() == 1 &&
495 md->layout_size_in_bytes(lk) == sizeof(zpointer)),
496 "ZGC can only handle atomic flat values with a single oop");
497
498 // Iterate over each oop map, performing:
499 // 1) possibly raw copy for any primitive payload before each map
500 // 2) load and store barrier for each oop
501 // 3) possibly raw copy for any primitive payload trailer
502
503 // addr() points at the payload start, the oop map offset are relative to
504 // the object header, adjust address to account for this discrepancy.
505 const address src_addr = src.addr();
506 const address dst_addr = dst.addr();
507 const address oop_map_adjusted_src_addr = src_addr - md->payload_offset();
508 OopMapBlock* map = md->start_of_nonstatic_oop_maps();
509 const OopMapBlock* const end = map + md->nonstatic_oop_map_count();
510 size_t size_in_bytes = md->layout_size_in_bytes(lk);
511 size_t copied_bytes = 0;
512 while (map != end) {
513 zpointer* src_p = (zpointer*)(oop_map_adjusted_src_addr + map->offset());
514 const uintptr_t oop_offset = uintptr_t(src_p) - uintptr_t(src_addr);
515 zpointer* dst_p = (zpointer*)(uintptr_t(dst_addr) + oop_offset);
516
517 // Copy any leading primitive payload before every cluster of oops
518 assert(copied_bytes < oop_offset || copied_bytes == oop_offset, "Negative sized leading payload segment");
519 copy_primitive_payload(src_addr, dst_addr, oop_offset - copied_bytes, copied_bytes);
520
521 // Copy a cluster of oops
522 for (const zpointer* const src_end = src_p + map->count(); src_p < src_end; src_p++, dst_p++) {
523 oop_copy_one(dst_p, src_p);
524 copied_bytes += sizeof(zpointer);
525 }
526 map++;
527 }
528
529 // Copy trailing primitive payload after potential oops
530 assert(copied_bytes < size_in_bytes || copied_bytes == size_in_bytes, "Negative sized trailing payload segment");
531 copy_primitive_payload(src_addr, dst_addr, size_in_bytes - copied_bytes, copied_bytes);
532 } else {
533 Raw::value_copy_in_heap(src, dst);
534 }
535 }
536
537 template <DecoratorSet decorators, typename BarrierSetT>
538 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::value_store_null_in_heap(const ValuePayload& dst) {
539 const LayoutKind lk = dst.layout_kind();
540 assert(!LayoutKindHelper::is_null_free_flat(lk), "Cannot store null in null free layout");
541 const InlineKlass* md = dst.klass();
542
543 if (md->contains_oops()) {
544 assert(!LayoutKindHelper::is_atomic_flat(lk) ||
545 (md->nonstatic_oop_map_count() == 1 &&
546 md->layout_size_in_bytes(lk) == sizeof(zpointer)),
547 "ZGC can only handle atomic flat values with a single oop");
548
549 // Iterate over each oop map, performing:
550 // 1) possibly raw clear for any primitive payload before each map
551 // 2) store barrier and clear for each oop
552 // 3) possibly raw clear for any primitive payload trailer
553
554 // addr() points at the payload start, the oop map offset are relative to
555 // the object header, adjust address to account for this discrepancy.
556 const address dst_addr = dst.addr();
557 const address oop_map_adjusted_dst_addr = dst_addr - md->payload_offset();
558 OopMapBlock* map = md->start_of_nonstatic_oop_maps();
559 const OopMapBlock* const end = map + md->nonstatic_oop_map_count();
560 size_t size_in_bytes = md->layout_size_in_bytes(lk);
561 size_t copied_bytes = 0;
562 while (map != end) {
563 zpointer* dst_p = (zpointer*)(oop_map_adjusted_dst_addr + map->offset());
564 const uintptr_t oop_offset = uintptr_t(dst_p) - uintptr_t(dst_addr);
565
566 // Clear any leading primitive payload before every cluster of oops
567 assert(copied_bytes < oop_offset || copied_bytes == oop_offset, "Negative sized leading payload segment");
568 clear_primitive_payload(dst_addr, oop_offset - copied_bytes, copied_bytes);
569
570 // Clear a cluster of oops
571 for (const zpointer* const dst_end = dst_p + map->count(); dst_p < dst_end; dst_p++) {
572 oop_clear_one(dst_p);
573 copied_bytes += sizeof(zpointer);
574 }
575 map++;
576 }
577
578 // Clear trailing primitive payload after potential oops
579 assert(copied_bytes < size_in_bytes || copied_bytes == size_in_bytes, "Negative sized trailing payload segment");
580 clear_primitive_payload(dst_addr, size_in_bytes - copied_bytes, copied_bytes);
581 } else {
582 Raw::value_store_null(dst);
583 }
584 }
585
586 //
587 // Not in heap
588 //
589 template <DecoratorSet decorators, typename BarrierSetT>
590 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(zpointer* p) {
591 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
592
593 const zpointer o = Raw::template load<zpointer>(p);
594 assert_is_valid(o);
595 return to_oop(load_barrier(p, o));
596 }
597
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(oop* p) {
  verify_decorators_absent<ON_UNKNOWN_OOP_REF>();

  // Reinterpret the slot as a colored zpointer field and delegate
  return oop_load_not_in_heap((zpointer*)p);
}
604
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_cmpxchg_not_in_heap(zpointer* p, oop compare_value, oop new_value) {
  verify_decorators_present<ON_STRONG_OOP_REF>();
  verify_decorators_absent<AS_NO_KEEPALIVE>();

  // Heal the native field before the CAS; both the compare and new values
  // below are colored store-good, so the field must hold a store-good pointer
  // for the comparison to succeed
  store_barrier_native_with_healing(p);

  const zpointer o = Raw::atomic_cmpxchg(p, store_good(compare_value), store_good(new_value));
  assert_is_valid(o);

  // The previous value is store-good after healing; strip the color
  return to_oop(ZPointer::uncolor_store_good(o));
}
617
template <DecoratorSet decorators, typename BarrierSetT>
inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_atomic_xchg_not_in_heap(zpointer* p, oop new_value) {
  verify_decorators_present<ON_STRONG_OOP_REF>();
  verify_decorators_absent<AS_NO_KEEPALIVE>();

  // Heal the native field before the exchange so the previous value read back
  // below is a store-good colored pointer
  store_barrier_native_with_healing(p);

  const zpointer o = Raw::atomic_xchg(p, store_good(new_value));
  assert_is_valid(o);

  // Strip the store-good color from the previous value
  return to_oop(ZPointer::uncolor_store_good(o));
}
630
631 #endif // SHARE_GC_Z_ZBARRIERSET_INLINE_HPP