31 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
32 #include "gc/shenandoah/shenandoahRuntime.hpp"
33 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
34 #include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
35 #include "interpreter/interpreter.hpp"
36 #include "interpreter/interp_masm.hpp"
37 #include "runtime/javaThread.hpp"
38 #include "runtime/sharedRuntime.hpp"
39 #ifdef COMPILER1
40 #include "c1/c1_LIRAssembler.hpp"
41 #include "c1/c1_MacroAssembler.hpp"
42 #include "gc/shenandoah/c1/shenandoahBarrierSetC1.hpp"
43 #endif
44
45 #define __ masm->
46
47 void ShenandoahBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
48 Register src, Register dst, Register count, RegSet saved_regs) {
49 if (is_oop) {
50 bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0;
51 if ((ShenandoahSATBBarrier && !dest_uninitialized) || ShenandoahIUBarrier || ShenandoahLoadRefBarrier) {
52
53 Label done;
54
55 // Avoid calling runtime if count == 0
56 __ beqz(count, done);
57
58 // Is GC active?
59 Address gc_state(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
60 assert_different_registers(src, dst, count, t0);
61
62 __ lbu(t0, gc_state);
63 if (ShenandoahSATBBarrier && dest_uninitialized) {
64 __ test_bit(t0, t0, ShenandoahHeap::HAS_FORWARDED_BITPOS);
65 __ beqz(t0, done);
66 } else {
67 __ andi(t0, t0, ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::MARKING);
68 __ beqz(t0, done);
69 }
70
71 __ push_reg(saved_regs, sp);
295 } else {
296 target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak);
297 }
298 } else {
299 assert(is_phantom, "only remaining strength");
300 assert(!is_narrow, "phantom access cannot be narrow");
301 target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak);
302 }
303 __ call(target);
304 __ mv(t0, x10);
305 __ pop_call_clobbered_registers();
306 __ mv(x10, t0);
307 __ bind(not_cset);
308 __ mv(result_dst, x10);
309 __ pop_reg(saved_regs, sp);
310
311 __ bind(heap_stable);
312 __ leave();
313 }
314
// Emit the Shenandoah IU ("incremental update") barrier for a newly stored
// reference value. Guarded by the ShenandoahIUBarrier flag: when the flag is
// off this emits nothing.
//
// The barrier funnels the NEW value (dst) through the SATB pre-write barrier
// machinery: satb_write_barrier_pre is called with obj == noreg and
// pre_val == dst, so only the value-enqueue path is exercised, with
// tosca_live == true and expand_call == false. All call-clobbered registers
// are saved/restored around the call — presumably because the pre-barrier
// slow path can call out to the runtime (TODO confirm against
// satb_write_barrier_pre).
//
// dst: register holding the just-stored oop value (enqueued as pre_val)
// tmp: scratch register handed to satb_write_barrier_pre (t0 is also used)
315 void ShenandoahBarrierSetAssembler::iu_barrier(MacroAssembler* masm, Register dst, Register tmp) {
316 if (ShenandoahIUBarrier) {
317 __ push_call_clobbered_registers();
318
// obj == noreg, pre_val == dst, thread == xthread, tosca_live, !expand_call
319 satb_write_barrier_pre(masm, noreg, dst, xthread, tmp, t0, true, false);
320
321 __ pop_call_clobbered_registers();
322 }
323 }
324
325 //
326 // Arguments:
327 //
328 // Inputs:
329 // src: oop location to load from, might be clobbered
330 //
331 // Output:
332 // dst: oop loaded from src location
333 //
334 // Kill:
335 // x30 (tmp reg)
336 //
337 // Alias:
338 // dst: x30 (might use x30 as temporary output register to avoid clobbering src)
339 //
340 void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm,
341 DecoratorSet decorators,
342 BasicType type,
343 Register dst,
344 Address src,
407 // flatten object address if needed
408 if (dst.offset() == 0) {
409 if (dst.base() != tmp3) {
410 __ mv(tmp3, dst.base());
411 }
412 } else {
413 __ la(tmp3, dst);
414 }
415
416 shenandoah_write_barrier_pre(masm,
417 tmp3 /* obj */,
418 tmp2 /* pre_val */,
419 xthread /* thread */,
420 tmp1 /* tmp */,
421 val != noreg /* tosca_live */,
422 false /* expand_call */);
423
424 if (val == noreg) {
425 BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), noreg, noreg, noreg, noreg);
426 } else {
427 iu_barrier(masm, val, tmp1);
428 // Barrier needs uncompressed oop for region cross check.
429 Register new_val = val;
430 if (UseCompressedOops) {
431 new_val = t1;
432 __ mv(new_val, val);
433 }
434 BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), val, noreg, noreg, noreg);
435 }
436 }
437
438 void ShenandoahBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
439 Register obj, Register tmp, Label& slowpath) {
440 Label done;
441 // Resolve jobject
442 BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);
443
444 // Check for null.
445 __ beqz(obj, done);
446
447 assert(obj != t1, "need t1");
448 Address gc_state(jni_env, ShenandoahThreadLocalData::gc_state_offset() - JavaThread::jni_environment_offset());
|
31 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
32 #include "gc/shenandoah/shenandoahRuntime.hpp"
33 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
34 #include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
35 #include "interpreter/interpreter.hpp"
36 #include "interpreter/interp_masm.hpp"
37 #include "runtime/javaThread.hpp"
38 #include "runtime/sharedRuntime.hpp"
39 #ifdef COMPILER1
40 #include "c1/c1_LIRAssembler.hpp"
41 #include "c1/c1_MacroAssembler.hpp"
42 #include "gc/shenandoah/c1/shenandoahBarrierSetC1.hpp"
43 #endif
44
45 #define __ masm->
46
47 void ShenandoahBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
48 Register src, Register dst, Register count, RegSet saved_regs) {
49 if (is_oop) {
50 bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0;
51 if ((ShenandoahSATBBarrier && !dest_uninitialized) || ShenandoahLoadRefBarrier) {
52
53 Label done;
54
55 // Avoid calling runtime if count == 0
56 __ beqz(count, done);
57
58 // Is GC active?
59 Address gc_state(xthread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
60 assert_different_registers(src, dst, count, t0);
61
62 __ lbu(t0, gc_state);
63 if (ShenandoahSATBBarrier && dest_uninitialized) {
64 __ test_bit(t0, t0, ShenandoahHeap::HAS_FORWARDED_BITPOS);
65 __ beqz(t0, done);
66 } else {
67 __ andi(t0, t0, ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::MARKING);
68 __ beqz(t0, done);
69 }
70
71 __ push_reg(saved_regs, sp);
295 } else {
296 target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak);
297 }
298 } else {
299 assert(is_phantom, "only remaining strength");
300 assert(!is_narrow, "phantom access cannot be narrow");
301 target = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak);
302 }
303 __ call(target);
304 __ mv(t0, x10);
305 __ pop_call_clobbered_registers();
306 __ mv(x10, t0);
307 __ bind(not_cset);
308 __ mv(result_dst, x10);
309 __ pop_reg(saved_regs, sp);
310
311 __ bind(heap_stable);
312 __ leave();
313 }
314
315 //
316 // Arguments:
317 //
318 // Inputs:
319 // src: oop location to load from, might be clobbered
320 //
321 // Output:
322 // dst: oop loaded from src location
323 //
324 // Kill:
325 // x30 (tmp reg)
326 //
327 // Alias:
328 // dst: x30 (might use x30 as temporary output register to avoid clobbering src)
329 //
330 void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm,
331 DecoratorSet decorators,
332 BasicType type,
333 Register dst,
334 Address src,
397 // flatten object address if needed
398 if (dst.offset() == 0) {
399 if (dst.base() != tmp3) {
400 __ mv(tmp3, dst.base());
401 }
402 } else {
403 __ la(tmp3, dst);
404 }
405
406 shenandoah_write_barrier_pre(masm,
407 tmp3 /* obj */,
408 tmp2 /* pre_val */,
409 xthread /* thread */,
410 tmp1 /* tmp */,
411 val != noreg /* tosca_live */,
412 false /* expand_call */);
413
414 if (val == noreg) {
415 BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), noreg, noreg, noreg, noreg);
416 } else {
417 // Barrier needs uncompressed oop for region cross check.
418 Register new_val = val;
419 if (UseCompressedOops) {
420 new_val = t1;
421 __ mv(new_val, val);
422 }
423 BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), val, noreg, noreg, noreg);
424 }
425 }
426
427 void ShenandoahBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
428 Register obj, Register tmp, Label& slowpath) {
429 Label done;
430 // Resolve jobject
431 BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);
432
433 // Check for null.
434 __ beqz(obj, done);
435
436 assert(obj != t1, "need t1");
437 Address gc_state(jni_env, ShenandoahThreadLocalData::gc_state_offset() - JavaThread::jni_environment_offset());
|