src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.cpp
/*
* Copyright (c) 2018, 2022, Red Hat, Inc. All rights reserved.
+ * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahRuntime.hpp"
#include "gc/shenandoah/shenandoahThreadLocalData.hpp"
#include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
+ #include "gc/shenandoah/mode/shenandoahMode.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interp_masm.hpp"
#include "runtime/javaThread.hpp"
#include "runtime/sharedRuntime.hpp"
#ifdef COMPILER1
__ bind(done);
}
}
}
+ void ShenandoahBarrierSetAssembler::arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
+ Register start, Register count, Register tmp, RegSet saved_regs) {
+ if (ShenandoahCardBarrier && is_oop) {
+ gen_write_ref_array_post_barrier(masm, decorators, start, count, tmp, saved_regs);
+ }
+ }
+
void ShenandoahBarrierSetAssembler::shenandoah_write_barrier_pre(MacroAssembler* masm,
Register obj,
Register pre_val,
Register thread,
Register tmp,
__ pop_call_clobbered_registers();
__ leave();
}
}
+ void ShenandoahBarrierSetAssembler::store_check(MacroAssembler* masm, Register obj) {
+ assert(ShenandoahCardBarrier, "Should have been checked by caller");
+
+ __ lsr(obj, obj, CardTable::card_shift());
+
+ assert(CardTable::dirty_card_val() == 0, "must be");
+
+ __ load_byte_map_base(rscratch1);
+
+ if (UseCondCardMark) {
+ Label L_already_dirty;
+ __ ldrb(rscratch2, Address(obj, rscratch1));
+ // dirty_card_val() is 0, so a zero card byte means the card is already dirty: skip the store.
+ __ cbz(rscratch2, L_already_dirty);
+ __ strb(zr, Address(obj, rscratch1));
+ __ bind(L_already_dirty);
+ } else {
+ __ strb(zr, Address(obj, rscratch1));
+ }
+ }
+
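Aside (illustrative, not part of the patch): a minimal C++ sketch of the card-marking arithmetic that store_check() emits, assuming the default 512-byte cards (card_shift() == 9) and dirty_card_val() == 0, which the assert above depends on. The helper name dirty_card_for is made up for this sketch.

#include <cstdint>

// Hypothetical equivalent of the emitted sequence (conditional variant shown).
static inline void dirty_card_for(uintptr_t store_addr, uint8_t* byte_map_base,
                                  unsigned card_shift /* 9 for 512-byte cards */) {
  uint8_t* card = byte_map_base + (store_addr >> card_shift);  // Address(obj, rscratch1)
  if (*card != 0) {   // UseCondCardMark: a zero byte is already dirty, so skip the store
    *card = 0;        // strb zr, i.e. dirty_card_val()
  }
  // Without UseCondCardMark the store is issued unconditionally.
}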
void ShenandoahBarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
bool on_oop = is_reference_type(type);
if (!on_oop) {
BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);
rthread /* thread */,
tmp1 /* tmp */,
val != noreg /* tosca_live */,
false /* expand_call */);
- if (val == noreg) {
- BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), noreg, noreg, noreg, noreg);
- } else {
- // Barrier needs uncompressed oop for region cross check.
- Register new_val = val;
- if (UseCompressedOops) {
- new_val = rscratch2;
- __ mov(new_val, val);
- }
- BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), val, noreg, noreg, noreg);
- }
+ BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), val, noreg, noreg, noreg);
+ bool in_heap = (decorators & IN_HEAP) != 0;
+ bool needs_post_barrier = (val != noreg) && in_heap && ShenandoahCardBarrier;
+ if (needs_post_barrier) {
+ store_check(masm, tmp3);
+ }
}
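Aside (illustrative, not part of the patch): the post-barrier condition restated as a plain predicate. In this path val == noreg means a null is being stored; a null store cannot create a reference for the remembered set to find, off-heap stores are not covered by the card table, and ShenandoahCardBarrier is only set when the card-table barrier is in use.

// Hypothetical restatement of the needs_post_barrier test above.
static inline bool needs_card_mark(bool storing_null, bool in_heap, bool card_barrier_enabled) {
  return !storing_null && in_heap && card_barrier_enabled;
}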
void ShenandoahBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
Register obj, Register tmp, Label& slowpath) {
Label done;
} else {
__ cset(result, Assembler::EQ);
}
}
+ void ShenandoahBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
+ Register start, Register count, Register scratch, RegSet saved_regs) {
+ assert(ShenandoahCardBarrier, "Should have been checked by caller");
+
+ Label L_loop, L_done;
+ const Register end = count;
+
+ // Zero count? Nothing to do.
+ __ cbz(count, L_done);
+
+ // end = start + (count << LogBytesPerHeapOop), then step back one element
+ // so end is the address of the last element and the card range is inclusive
+ __ lea(end, Address(start, count, Address::lsl(LogBytesPerHeapOop)));
+ __ sub(end, end, BytesPerHeapOop);
+ __ lsr(start, start, CardTable::card_shift());
+ __ lsr(end, end, CardTable::card_shift());
+
+ // offset of the last card from the first; the loop below dirties offsets count down to 0
+ __ sub(count, end, start);
+
+ __ load_byte_map_base(scratch);
+ __ add(start, start, scratch);
+ __ bind(L_loop);
+ __ strb(zr, Address(start, count));
+ __ subs(count, count, 1);
+ __ br(Assembler::GE, L_loop);
+ __ bind(L_done);
+ }
+
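Aside (illustrative, not part of the patch): the same card-range dirtying written as plain C++. The last-element address is made inclusive so the final card is dirtied even when the copied range ends exactly on a card boundary; the emitted loop walks the (end - start) + 1 card bytes backwards via a decrementing offset. The 8-byte heap oop size and 512-byte card size are assumptions of the sketch only.

#include <cstddef>
#include <cstdint>

// Hypothetical equivalent of the emitted loop.
static void dirty_cards_for_array(uint8_t* byte_map_base, uintptr_t start, size_t count,
                                  unsigned card_shift = 9, size_t oop_size = 8) {
  if (count == 0) return;                                      // cbz(count, L_done)
  uintptr_t last       = start + count * oop_size - oop_size;  // inclusive last element
  uintptr_t first_card = start >> card_shift;
  uintptr_t last_card  = last  >> card_shift;
  for (uintptr_t c = first_card; c <= last_card; c++) {
    byte_map_base[c] = 0;                                      // 0 == dirty_card_val()
  }
}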
#undef __
#ifdef COMPILER1
#define __ ce->masm()->