10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23
24 #ifndef SHARE_GC_Z_ZBARRIERSET_INLINE_HPP
25 #define SHARE_GC_Z_ZBARRIERSET_INLINE_HPP
26
27 #include "gc/z/zBarrierSet.hpp"
28
29 #include "gc/shared/accessBarrierSupport.inline.hpp"
30 #include "gc/z/zAddress.inline.hpp"
31 #include "gc/z/zHeap.hpp"
32 #include "gc/z/zNMethod.hpp"
33 #include "oops/objArrayOop.hpp"
34 #include "utilities/debug.hpp"
35
36 template <DecoratorSet decorators, typename BarrierSetT>
37 template <DecoratorSet expected>
38 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::verify_decorators_present() {
39 if ((decorators & expected) == 0) {
40 fatal("Using unsupported access decorators");
41 }
42 }
43
44 template <DecoratorSet decorators, typename BarrierSetT>
45 template <DecoratorSet expected>
46 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::verify_decorators_absent() {
47 if ((decorators & expected) != 0) {
48 fatal("Using unsupported access decorators");
49 }
50 }
51
52 template <DecoratorSet decorators, typename BarrierSetT>
53 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::unsupported() {
54 ShouldNotReachHere();
314
315 store_barrier_heap_with_healing(p);
316
317 const zpointer o = Raw::atomic_xchg_in_heap(p, store_good(new_value));
318 assert_is_valid(o);
319
320 return to_oop(ZPointer::uncolor_store_good(o));
321 }
322
323 template <DecoratorSet decorators, typename BarrierSetT>
324 inline zaddress ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one_barriers(zpointer* dst, zpointer* src) {
325 store_barrier_heap_without_healing(dst);
326
327 return ZBarrierSet::load_barrier_on_oop_field(src);
328 }
329
330 template <DecoratorSet decorators, typename BarrierSetT>
331 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one(zpointer* dst, zpointer* src) {
332 const zaddress obj = oop_copy_one_barriers(dst, src);
333
334 // Future location for null-restriction check and failure reporting
335
336 AtomicAccess::store(dst, ZAddress::store_good(obj));
337
338 return OopCopyResult::ok;
339 }
340
341 template <DecoratorSet decorators, typename BarrierSetT>
342 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one_check_cast(zpointer* dst, zpointer* src, Klass* dst_klass) {
343 const zaddress obj = oop_copy_one_barriers(dst, src);
344
345 if (!oopDesc::is_instanceof_or_null(to_oop(obj), dst_klass)) {
346 // Check cast failed
347 return OopCopyResult::failed_check_class_cast;
348 }
349
350 AtomicAccess::store(dst, ZAddress::store_good(obj));
351
352 return OopCopyResult::ok;
353 }
354
355 template <DecoratorSet decorators, typename BarrierSetT>
356 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap_check_cast(zpointer* dst, zpointer* src, size_t length, Klass* dst_klass) {
  // Check cast and copy each element
358 for (const zpointer* const end = src + length; src < end; src++, dst++) {
359 const OopCopyResult result = oop_copy_one_check_cast(dst, src, dst_klass);
360 if (result != OopCopyResult::ok) {
361 return result;
362 }
363 }
364
400
401 template <DecoratorSet decorators, typename BarrierSetT>
402 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, zpointer* src_raw,
403 arrayOop dst_obj, size_t dst_offset_in_bytes, zpointer* dst_raw,
404 size_t length) {
405 zpointer* const src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
406 zpointer* const dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);
407
408 if (HasDecorator<decorators, ARRAYCOPY_CHECKCAST>::value) {
409 Klass* const dst_klass = objArrayOop(dst_obj)->element_klass();
410 return oop_arraycopy_in_heap_check_cast(dst, src, length, dst_klass);
411 } else {
412 return oop_arraycopy_in_heap_no_check_cast(dst, src, length);
413 }
414 }
415
416 template <DecoratorSet decorators, typename BarrierSetT>
417 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
418 check_is_valid_zaddress(src);
419
420 if (dst->is_objArray()) {
421 // Cloning an object array is similar to performing array copy.
422 // If an array is large enough to have its allocation segmented,
423 // this operation might require GC barriers. However, the intrinsics
424 // for cloning arrays transform the clone to an optimized allocation
425 // and arraycopy sequence, so the performance of this runtime call
426 // does not matter for object arrays.
427 clone_obj_array(objArrayOop(src), objArrayOop(dst));
428 return;
429 }
430
431 // Fix the oops
432 ZBarrierSet::load_barrier_all(src, size);
433
434 // Clone the object
435 Raw::clone_in_heap(src, dst, size);
436
437 // Color store good before handing out
438 ZBarrierSet::color_store_good_all(dst, size);
439 }
440
441 //
442 // Not in heap
443 //
444 template <DecoratorSet decorators, typename BarrierSetT>
445 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(zpointer* p) {
446 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
447
448 const zpointer o = Raw::template load<zpointer>(p);
449 assert_is_valid(o);
450 return to_oop(load_barrier(p, o));
451 }
452
453 template <DecoratorSet decorators, typename BarrierSetT>
454 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(oop* p) {
455 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
456
457 return oop_load_not_in_heap((zpointer*)p);
458 }
459
460 template <DecoratorSet decorators, typename BarrierSetT>
|
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23
24 #ifndef SHARE_GC_Z_ZBARRIERSET_INLINE_HPP
25 #define SHARE_GC_Z_ZBARRIERSET_INLINE_HPP
26
27 #include "gc/z/zBarrierSet.hpp"
28
29 #include "gc/shared/accessBarrierSupport.inline.hpp"
30 #include "gc/z/zAddress.hpp"
31 #include "gc/z/zAddress.inline.hpp"
32 #include "gc/z/zHeap.hpp"
33 #include "gc/z/zNMethod.hpp"
34 #include "oops/inlineKlass.inline.hpp"
35 #include "oops/objArrayOop.hpp"
36 #include "utilities/copy.hpp"
37 #include "utilities/debug.hpp"
38 #include "utilities/globalDefinitions.hpp"
39
40 template <DecoratorSet decorators, typename BarrierSetT>
41 template <DecoratorSet expected>
42 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::verify_decorators_present() {
43 if ((decorators & expected) == 0) {
44 fatal("Using unsupported access decorators");
45 }
46 }
47
48 template <DecoratorSet decorators, typename BarrierSetT>
49 template <DecoratorSet expected>
50 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::verify_decorators_absent() {
51 if ((decorators & expected) != 0) {
52 fatal("Using unsupported access decorators");
53 }
54 }
55
56 template <DecoratorSet decorators, typename BarrierSetT>
57 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::unsupported() {
58 ShouldNotReachHere();
318
319 store_barrier_heap_with_healing(p);
320
321 const zpointer o = Raw::atomic_xchg_in_heap(p, store_good(new_value));
322 assert_is_valid(o);
323
324 return to_oop(ZPointer::uncolor_store_good(o));
325 }
326
327 template <DecoratorSet decorators, typename BarrierSetT>
328 inline zaddress ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one_barriers(zpointer* dst, zpointer* src) {
329 store_barrier_heap_without_healing(dst);
330
331 return ZBarrierSet::load_barrier_on_oop_field(src);
332 }
333
334 template <DecoratorSet decorators, typename BarrierSetT>
335 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one(zpointer* dst, zpointer* src) {
336 const zaddress obj = oop_copy_one_barriers(dst, src);
337
338 if (HasDecorator<decorators, ARRAYCOPY_NOTNULL>::value && is_null(obj)) {
339 return OopCopyResult::failed_check_null;
340 }
341
342 AtomicAccess::store(dst, ZAddress::store_good(obj));
343
344 return OopCopyResult::ok;
345 }
346
347 template <DecoratorSet decorators, typename BarrierSetT>
348 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_clear_one(zpointer* dst) {
349 if (HasDecorator<decorators, ARRAYCOPY_NOTNULL>::value) {
350 return OopCopyResult::failed_check_null;
351 }
352
353 // Store barrier
354 store_barrier_heap_without_healing(dst);
355
356 // Store colored null
357 AtomicAccess::store(dst, color_null());
358
359 return OopCopyResult::ok;
360 }
361
362 template <DecoratorSet decorators, typename BarrierSetT>
363 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_copy_one_check_cast(zpointer* dst, zpointer* src, Klass* dst_klass) {
364 const zaddress obj = oop_copy_one_barriers(dst, src);
365
366 if (HasDecorator<decorators, ARRAYCOPY_NOTNULL>::value && is_null(obj)) {
367 return OopCopyResult::failed_check_null;
368 }
369
370 if (!oopDesc::is_instanceof_or_null(to_oop(obj), dst_klass)) {
371 // Check cast failed
372 return OopCopyResult::failed_check_class_cast;
373 }
374
375 AtomicAccess::store(dst, ZAddress::store_good(obj));
376
377 return OopCopyResult::ok;
378 }
379
380 template <DecoratorSet decorators, typename BarrierSetT>
381 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap_check_cast(zpointer* dst, zpointer* src, size_t length, Klass* dst_klass) {
  // Check cast and copy each element
383 for (const zpointer* const end = src + length; src < end; src++, dst++) {
384 const OopCopyResult result = oop_copy_one_check_cast(dst, src, dst_klass);
385 if (result != OopCopyResult::ok) {
386 return result;
387 }
388 }
389
425
426 template <DecoratorSet decorators, typename BarrierSetT>
427 inline OopCopyResult ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, zpointer* src_raw,
428 arrayOop dst_obj, size_t dst_offset_in_bytes, zpointer* dst_raw,
429 size_t length) {
430 zpointer* const src = arrayOopDesc::obj_offset_to_raw(src_obj, src_offset_in_bytes, src_raw);
431 zpointer* const dst = arrayOopDesc::obj_offset_to_raw(dst_obj, dst_offset_in_bytes, dst_raw);
432
433 if (HasDecorator<decorators, ARRAYCOPY_CHECKCAST>::value) {
434 Klass* const dst_klass = objArrayOop(dst_obj)->element_klass();
435 return oop_arraycopy_in_heap_check_cast(dst, src, length, dst_klass);
436 } else {
437 return oop_arraycopy_in_heap_no_check_cast(dst, src, length);
438 }
439 }
440
441 template <DecoratorSet decorators, typename BarrierSetT>
442 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::clone_in_heap(oop src, oop dst, size_t size) {
443 check_is_valid_zaddress(src);
444
445 if (dst->is_refArray()) {
446 // Cloning an object array is similar to performing array copy.
447 // If an array is large enough to have its allocation segmented,
448 // this operation might require GC barriers. However, the intrinsics
449 // for cloning arrays transform the clone to an optimized allocation
450 // and arraycopy sequence, so the performance of this runtime call
451 // does not matter for object arrays.
452 clone_obj_array(objArrayOop(src), objArrayOop(dst));
453 return;
454 }
455
456 // Fix the oops
457 ZBarrierSet::load_barrier_all(src, size);
458
459 // Clone the object
460 Raw::clone_in_heap(src, dst, size);
461
462 // Color store good before handing out
463 ZBarrierSet::color_store_good_all(dst, size);
464 }
465
466 static inline void copy_primitive_payload(const void* src, const void* dst, const size_t payload_size_bytes, size_t& copied_bytes) {
467 if (payload_size_bytes == 0) {
468 return;
469 }
470 void* src_payload = (void*)(address(src) + copied_bytes);
471 void* dst_payload = (void*)(address(dst) + copied_bytes);
472 Copy::copy_value_content(src_payload, dst_payload, payload_size_bytes);
473 copied_bytes += payload_size_bytes;
474 }
475
476 static inline void clear_primitive_payload(const void* dst, const size_t payload_size_bytes, size_t& copied_bytes) {
477 if (payload_size_bytes == 0) {
478 return;
479 }
480
481 void* dst_payload = (void*)(address(dst) + copied_bytes);
482 Copy::fill_to_memory_atomic(dst_payload, payload_size_bytes);
483 copied_bytes += payload_size_bytes;
484 }
485
486 template <DecoratorSet decorators, typename BarrierSetT>
487 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::value_copy_in_heap(const ValuePayload& src, const ValuePayload& dst) {
488 precond(src.klass() == dst.klass());
489
490 const LayoutKind lk = LayoutKindHelper::get_copy_layout(src.layout_kind(), dst.layout_kind());
491 const InlineKlass* md = src.klass();
492 if (md->contains_oops()) {
493 assert(!LayoutKindHelper::is_atomic_flat(lk) ||
494 (md->nonstatic_oop_map_count() == 1 &&
495 md->layout_size_in_bytes(lk) == sizeof(zpointer)),
496 "ZGC can only handle atomic flat values with a single oop");
497
498 // Iterate over each oop map, performing:
499 // 1) possibly raw copy for any primitive payload before each map
500 // 2) load and store barrier for each oop
501 // 3) possibly raw copy for any primitive payload trailer
502
503 // addr() points at the payload start, the oop map offset are relative to
504 // the object header, adjust address to account for this discrepancy.
505 const address src_addr = src.addr();
506 const address dst_addr = dst.addr();
507 const address oop_map_adjusted_src_addr = src_addr - md->payload_offset();
508 OopMapBlock* map = md->start_of_nonstatic_oop_maps();
509 const OopMapBlock* const end = map + md->nonstatic_oop_map_count();
510 size_t size_in_bytes = md->layout_size_in_bytes(lk);
511 size_t copied_bytes = 0;
512 while (map != end) {
513 zpointer* src_p = (zpointer*)(oop_map_adjusted_src_addr + map->offset());
514 const uintptr_t oop_offset = uintptr_t(src_p) - uintptr_t(src_addr);
515 zpointer* dst_p = (zpointer*)(uintptr_t(dst_addr) + oop_offset);
516
517 // Copy any leading primitive payload before every cluster of oops
518 assert(copied_bytes < oop_offset || copied_bytes == oop_offset, "Negative sized leading payload segment");
519 copy_primitive_payload(src_addr, dst_addr, oop_offset - copied_bytes, copied_bytes);
520
521 // Copy a cluster of oops
522 for (const zpointer* const src_end = src_p + map->count(); src_p < src_end; src_p++, dst_p++) {
523 oop_copy_one(dst_p, src_p);
524 copied_bytes += sizeof(zpointer);
525 }
526 map++;
527 }
528
529 // Copy trailing primitive payload after potential oops
530 assert(copied_bytes < size_in_bytes || copied_bytes == size_in_bytes, "Negative sized trailing payload segment");
531 copy_primitive_payload(src_addr, dst_addr, size_in_bytes - copied_bytes, copied_bytes);
532 } else {
533 Raw::value_copy_in_heap(src, dst);
534 }
535 }
536
537 template <DecoratorSet decorators, typename BarrierSetT>
538 inline void ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::value_store_null_in_heap(const ValuePayload& dst) {
539 const LayoutKind lk = dst.layout_kind();
540 assert(!LayoutKindHelper::is_null_free_flat(lk), "Cannot store null in null free layout");
541 const InlineKlass* md = dst.klass();
542
543 if (md->contains_oops()) {
544 assert(!LayoutKindHelper::is_atomic_flat(lk) ||
545 (md->nonstatic_oop_map_count() == 1 &&
546 md->layout_size_in_bytes(lk) == sizeof(zpointer)),
547 "ZGC can only handle atomic flat values with a single oop");
548
549 // Iterate over each oop map, performing:
550 // 1) possibly raw clear for any primitive payload before each map
551 // 2) store barrier and clear for each oop
552 // 3) possibly raw clear for any primitive payload trailer
553
554 // addr() points at the payload start, the oop map offset are relative to
555 // the object header, adjust address to account for this discrepancy.
556 const address dst_addr = dst.addr();
557 const address oop_map_adjusted_dst_addr = dst_addr - md->payload_offset();
558 OopMapBlock* map = md->start_of_nonstatic_oop_maps();
559 const OopMapBlock* const end = map + md->nonstatic_oop_map_count();
560 size_t size_in_bytes = md->layout_size_in_bytes(lk);
561 size_t copied_bytes = 0;
562 while (map != end) {
563 zpointer* dst_p = (zpointer*)(oop_map_adjusted_dst_addr + map->offset());
564 const uintptr_t oop_offset = uintptr_t(dst_p) - uintptr_t(dst_addr);
565
566 // Clear any leading primitive payload before every cluster of oops
567 assert(copied_bytes < oop_offset || copied_bytes == oop_offset, "Negative sized leading payload segment");
568 clear_primitive_payload(dst_addr, oop_offset - copied_bytes, copied_bytes);
569
570 // Clear a cluster of oops
571 for (const zpointer* const dst_end = dst_p + map->count(); dst_p < dst_end; dst_p++) {
572 oop_clear_one(dst_p);
573 copied_bytes += sizeof(zpointer);
574 }
575 map++;
576 }
577
578 // Clear trailing primitive payload after potential oops
579 assert(copied_bytes < size_in_bytes || copied_bytes == size_in_bytes, "Negative sized trailing payload segment");
580 clear_primitive_payload(dst_addr, size_in_bytes - copied_bytes, copied_bytes);
581 } else {
582 Raw::value_store_null(dst);
583 }
584 }
585
586 //
587 // Not in heap
588 //
589 template <DecoratorSet decorators, typename BarrierSetT>
590 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(zpointer* p) {
591 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
592
593 const zpointer o = Raw::template load<zpointer>(p);
594 assert_is_valid(o);
595 return to_oop(load_barrier(p, o));
596 }
597
598 template <DecoratorSet decorators, typename BarrierSetT>
599 inline oop ZBarrierSet::AccessBarrier<decorators, BarrierSetT>::oop_load_not_in_heap(oop* p) {
600 verify_decorators_absent<ON_UNKNOWN_OOP_REF>();
601
602 return oop_load_not_in_heap((zpointer*)p);
603 }
604
605 template <DecoratorSet decorators, typename BarrierSetT>
|