
src/share/vm/c1/c1_Runtime1.cpp


*** 56,66 ****
  #include "runtime/threadCritical.hpp"
  #include "runtime/vframe.hpp"
  #include "runtime/vframeArray.hpp"
  #include "utilities/copy.hpp"
  #include "utilities/events.hpp"
!
  
  // Implementation of StubAssembler
  
  StubAssembler::StubAssembler(CodeBuffer* code, const char * name, int stub_id) : C1_MacroAssembler(code) {
    _name = name;
--- 56,69 ----
  #include "runtime/threadCritical.hpp"
  #include "runtime/vframe.hpp"
  #include "runtime/vframeArray.hpp"
  #include "utilities/copy.hpp"
  #include "utilities/events.hpp"
! #include "utilities/macros.hpp"
! #if INCLUDE_ALL_GCS
! #include "gc_implementation/shenandoah/shenandoahBarrierSet.inline.hpp"
! #endif
  
  // Implementation of StubAssembler
  
  StubAssembler::StubAssembler(CodeBuffer* code, const char * name, int stub_id) : C1_MacroAssembler(code) {
    _name = name;
*** 197,206 **** --- 200,210 ----
    switch (id) {
      // These stubs don't need to have an oopmap
      case dtrace_object_alloc_id:
      case g1_pre_barrier_slow_id:
      case g1_post_barrier_slow_id:
+     case shenandoah_lrb_slow_id:
      case slow_subtype_check_id:
      case fpu2long_stub_id:
      case unwind_exception_id:
      case counter_overflow_id:
  #if defined(SPARC) || defined(PPC)
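
For context, a minimal self-contained sketch (not JDK code) of what the hunk above expresses: shenandoah_lrb_slow_id joins the group of stub ids whose code is generated without an oopmap, per the "These stubs don't need to have an oopmap" comment in the source. The StubId subset, the needs_oopmap helper, and the main driver below are hypothetical illustrations only.

    #include <cstdio>

    // Hypothetical subset of the Runtime1 stub ids involved in the switch.
    enum StubId {
      dtrace_object_alloc_id,
      g1_pre_barrier_slow_id,
      g1_post_barrier_slow_id,
      shenandoah_lrb_slow_id,   // new in this change
      slow_subtype_check_id,
      new_instance_id           // example of a stub that is not in the no-oopmap group
    };

    // Mirrors the classification the switch performs: barrier slow paths and the
    // other listed stubs are generated without an oopmap.
    static bool needs_oopmap(StubId id) {
      switch (id) {
        case dtrace_object_alloc_id:
        case g1_pre_barrier_slow_id:
        case g1_post_barrier_slow_id:
        case shenandoah_lrb_slow_id:
        case slow_subtype_check_id:
          return false;
        default:
          return true;
      }
    }

    int main() {
      std::printf("shenandoah_lrb_slow_id needs oopmap: %d\n", needs_oopmap(shenandoah_lrb_slow_id));
      std::printf("new_instance_id needs oopmap:        %d\n", needs_oopmap(new_instance_id));
      return 0;
    }
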
*** 1304,1313 **** --- 1308,1324 ----
    // barrier. The assert will fail if this is not the case.
    // Note that we use the non-virtual inlineable variant of write_ref_array.
    BarrierSet* bs = Universe::heap()->barrier_set();
    assert(bs->has_write_ref_array_opt(), "Barrier set must have ref array opt");
    assert(bs->has_write_ref_array_pre_opt(), "For pre-barrier as well.");
+
+ #if INCLUDE_ALL_GCS
+   if (UseShenandoahGC) {
+     ShenandoahBarrierSet::barrier_set()->arraycopy_barrier(src_addr, dst_addr, length);
+   }
+ #endif
+
    if (src == dst) {
      // same object, no check
      bs->write_ref_array_pre(dst_addr, length);
      Copy::conjoint_oops_atomic(src_addr, dst_addr, length);
      bs->write_ref_array((HeapWord*)dst_addr, length);
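
For context, a minimal self-contained sketch (not JDK code) of the barrier ordering the patched oop arraycopy path follows: the Shenandoah-specific arraycopy barrier runs first, before the generic pre-barrier, the element copy, and the post-barrier. GenericBarrier, ShenandoahHooks, copy_oops, and oop_t are hypothetical stand-ins for BarrierSet, ShenandoahBarrierSet, Copy::conjoint_oops_atomic, and oop slots; only the call ordering mirrors the diff above.

    #include <cstdio>
    #include <cstring>

    typedef void* oop_t;  // stand-in for an oop slot

    struct GenericBarrier {
      void write_ref_array_pre(oop_t* dst, int len) { std::printf("pre-barrier  over %d slots\n", len); }
      void write_ref_array(oop_t* dst, int len)     { std::printf("post-barrier over %d slots\n", len); }
    };

    struct ShenandoahHooks {
      // Shenandoah's extra step: process references in both ranges before the copy.
      void arraycopy_barrier(oop_t* src, oop_t* dst, int len) {
        std::printf("shenandoah arraycopy barrier, %d slots\n", len);
      }
    };

    // Element copy, standing in for Copy::conjoint_oops_atomic.
    static void copy_oops(oop_t* src, oop_t* dst, int len) {
      std::memmove(dst, src, len * sizeof(oop_t));
    }

    static void oop_arraycopy_sketch(bool use_shenandoah, oop_t* src, oop_t* dst, int len) {
      GenericBarrier bs;
      ShenandoahHooks shenandoah;
      if (use_shenandoah) {
        shenandoah.arraycopy_barrier(src, dst, len);  // the added hunk: GC-specific step runs first
      }
      bs.write_ref_array_pre(dst, len);               // generic pre-barrier on the destination range
      copy_oops(src, dst, len);                       // the actual copy
      bs.write_ref_array(dst, len);                   // generic post-barrier after the stores
    }

    int main() {
      oop_t a[4] = {0}, b[4] = {0};
      oop_arraycopy_sketch(true, a, b, 4);
      return 0;
    }
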