src/hotspot/share/gc/shared/barrierSet.hpp

@@ -28,10 +28,11 @@
  #include "gc/shared/barrierSetConfig.hpp"
  #include "memory/memRegion.hpp"
  #include "oops/access.hpp"
  #include "oops/accessBackend.hpp"
  #include "oops/oopsHierarchy.hpp"
+ #include "utilities/exceptions.hpp"
  #include "utilities/fakeRttiSupport.hpp"
  #include "utilities/macros.hpp"
  
  class BarrierSetAssembler;
  class BarrierSetC1;

@@ -117,10 +118,13 @@
    template <class BarrierSetC2T>
    static BarrierSetC2* make_barrier_set_c2() {
      return COMPILER2_PRESENT(new BarrierSetC2T()) NOT_COMPILER2(nullptr);
    }
  
+   static void throw_array_null_pointer_store_exception(arrayOop src, arrayOop dst, TRAPS);
+   static void throw_array_store_exception(arrayOop src, arrayOop dst, TRAPS);
+ 
  public:
    // Support for optimizing compilers to call the barrier set on slow path allocations
    // that did not enter a TLAB. Used for e.g. ReduceInitialCardMarks.
    // The allocation is safe to use iff it returns true. If not, the slow-path allocation
    // is redone until it succeeds. This can e.g. prevent allocations from the slow path

@@ -280,11 +284,11 @@
      static oop oop_atomic_xchg_in_heap_at(oop base, ptrdiff_t offset, oop new_value) {
        return Raw::oop_atomic_xchg_at(base, offset, new_value);
      }
  
      template <typename T>
-     static bool oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
+     static void oop_arraycopy_in_heap(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                        size_t length);
  
      // Off-heap oop accesses. These accessors get resolved when
      // IN_HEAP is not set (e.g. when using the NativeAccess API), it is

@@ -311,10 +315,15 @@
  
      // Clone barrier support
      static void clone_in_heap(oop src, oop dst, size_t size) {
        Raw::clone(src, dst, size);
      }
+ 
+     static void value_copy_in_heap(void* src, void* dst, InlineKlass* md) {
+       Raw::value_copy(src, dst, md);
+     }
+ 
    };
  };
  
  template<typename T>
  inline T* barrier_set_cast(BarrierSet* bs) {