
src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.cpp


  1 /*
  2  * Copyright (c) 2018, 2022, Red Hat, Inc. All rights reserved.

  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 27 #include "gc/shenandoah/shenandoahBarrierSetAssembler.hpp"
 28 #include "gc/shenandoah/shenandoahForwarding.hpp"
 29 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
 30 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
 31 #include "gc/shenandoah/shenandoahRuntime.hpp"
 32 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
 33 #include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"

 34 #include "interpreter/interpreter.hpp"
 35 #include "interpreter/interp_masm.hpp"
 36 #include "runtime/javaThread.hpp"
 37 #include "runtime/sharedRuntime.hpp"
 38 #ifdef COMPILER1
 39 #include "c1/c1_LIRAssembler.hpp"
 40 #include "c1/c1_MacroAssembler.hpp"
 41 #include "gc/shenandoah/c1/shenandoahBarrierSetC1.hpp"
 42 #endif
 43 
 44 #define __ masm->
 45 
 46 void ShenandoahBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
 47                                                        Register src, Register dst, Register count, RegSet saved_regs) {
 48   if (is_oop) {
 49     bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0;
 50     if ((ShenandoahSATBBarrier && !dest_uninitialized) || ShenandoahIUBarrier || ShenandoahLoadRefBarrier) {
 51 
 52       Label done;
 53 
 54       // Avoid calling runtime if count == 0
 55       __ cbz(count, done);
 56 
 57       // Is GC active?
 58       Address gc_state(rthread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
 59       __ ldrb(rscratch1, gc_state);
 60       if (ShenandoahSATBBarrier && dest_uninitialized) {
 61         __ tbz(rscratch1, ShenandoahHeap::HAS_FORWARDED_BITPOS, done);
 62       } else {
 63         __ mov(rscratch2, ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::MARKING);
 64         __ tst(rscratch1, rscratch2);
 65         __ br(Assembler::EQ, done);
 66       }
 67 
 68       __ push(saved_regs, sp);
 69       if (UseCompressedOops) {
 70         __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::arraycopy_barrier_narrow_oop_entry), src, dst, count);
 71       } else {
 72         __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::arraycopy_barrier_oop_entry), src, dst, count);
 73       }
 74       __ pop(saved_regs, sp);
 75       __ bind(done);
 76     }
 77   }
 78 }
 79 
 80 void ShenandoahBarrierSetAssembler::shenandoah_write_barrier_pre(MacroAssembler* masm,
 81                                                                  Register obj,
 82                                                                  Register pre_val,
 83                                                                  Register thread,
 84                                                                  Register tmp,
 85                                                                  bool tosca_live,
 86                                                                  bool expand_call) {
 87   if (ShenandoahSATBBarrier) {
 88     satb_write_barrier_pre(masm, obj, pre_val, thread, tmp, rscratch1, tosca_live, expand_call);
 89   }
 90 }
 91 
 92 void ShenandoahBarrierSetAssembler::satb_write_barrier_pre(MacroAssembler* masm,
 93                                                            Register obj,
 94                                                            Register pre_val,
 95                                                            Register thread,
 96                                                            Register tmp1,
 97                                                            Register tmp2,
 98                                                            bool tosca_live,
 99                                                            bool expand_call) {

288     }
289   } else {
290     assert(is_phantom, "only remaining strength");
291     assert(!is_narrow, "phantom access cannot be narrow");
292     __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom));
293   }
294   __ blr(lr);
295   __ mov(rscratch1, r0);
296   __ pop_call_clobbered_registers();
297   __ mov(r0, rscratch1);
298 
299   __ bind(not_cset);
300 
301   __ mov(result_dst, r0);
302   __ pop(to_save, sp);
303 
304   __ bind(heap_stable);
305   __ leave();
306 }
307 
308 void ShenandoahBarrierSetAssembler::iu_barrier(MacroAssembler* masm, Register dst, Register tmp) {
309   if (ShenandoahIUBarrier) {
310     __ push_call_clobbered_registers();
311     satb_write_barrier_pre(masm, noreg, dst, rthread, tmp, rscratch1, true, false);
312     __ pop_call_clobbered_registers();
313   }
314 }
315 
316 //
317 // Arguments:
318 //
319 // Inputs:
320 //   src:        oop location to load from, might be clobbered
321 //
322 // Output:
323 //   dst:        oop loaded from src location
324 //
325 // Kill:
326 //   rscratch1 (scratch reg)
327 //
328 // Alias:
329 //   dst: rscratch1 (might use rscratch1 as temporary output register to avoid clobbering src)
330 //
331 void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
332                                             Register dst, Address src, Register tmp1, Register tmp2) {
333   // 1: non-reference load, no additional barrier is needed
334   if (!is_reference_type(type)) {
335     BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);

358     BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);
359   }
360 
361   // 3: apply keep-alive barrier if needed
362   if (ShenandoahBarrierSet::need_keep_alive_barrier(decorators, type)) {
363     __ enter(/*strip_ret_addr*/true);
364     __ push_call_clobbered_registers();
365     satb_write_barrier_pre(masm /* masm */,
366                            noreg /* obj */,
367                            dst /* pre_val */,
368                            rthread /* thread */,
369                            tmp1 /* tmp1 */,
370                            tmp2 /* tmp2 */,
371                            true /* tosca_live */,
372                            true /* expand_call */);
373     __ pop_call_clobbered_registers();
374     __ leave();
375   }
376 }
377 
378 void ShenandoahBarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
379                                              Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
380   bool on_oop = is_reference_type(type);
381   if (!on_oop) {
382     BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);
383     return;
384   }
385 
386   // flatten object address if needed
387   if (dst.index() == noreg && dst.offset() == 0) {
388     if (dst.base() != tmp3) {
389       __ mov(tmp3, dst.base());
390     }
391   } else {
392     __ lea(tmp3, dst);
393   }
394 
395   shenandoah_write_barrier_pre(masm,
396                                tmp3 /* obj */,
397                                tmp2 /* pre_val */,
398                                rthread /* thread */,
399                                tmp1  /* tmp */,
400                                val != noreg /* tosca_live */,
401                                false /* expand_call */);
402 
403   if (val == noreg) {
404     BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), noreg, noreg, noreg, noreg);
405   } else {
406     iu_barrier(masm, val, tmp1);
407     // G1 barrier needs uncompressed oop for region cross check.
408     Register new_val = val;
409     if (UseCompressedOops) {
410       new_val = rscratch2;
411       __ mov(new_val, val);
412     }
413     BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), val, noreg, noreg, noreg);
414   }
415 
416 }
417 
418 void ShenandoahBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
419                                                                   Register obj, Register tmp, Label& slowpath) {
420   Label done;
421   // Resolve jobject
422   BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);
423 
424   // Check for null.
425   __ cbz(obj, done);
426 
427   assert(obj != rscratch2, "need rscratch2");
428   Address gc_state(jni_env, ShenandoahThreadLocalData::gc_state_offset() - JavaThread::jni_environment_offset());
429   __ lea(rscratch2, gc_state);
430   __ ldrb(rscratch2, Address(rscratch2));
431 
432   // Check for heap in evacuation phase
433   __ tbnz(rscratch2, ShenandoahHeap::EVACUATION_BITPOS, slowpath);

578   if (is_cae) {
579     // We're falling through to done to indicate success.  Success
580     // with is_cae is denoted by returning the value of expected as
581     // result.
582     __ mov(tmp2, expected);
583   }
584 
585   __ bind(done);
586   // At entry to done, the Z (EQ) flag is on iff the CAS
587   // operation was successful.  Additionally, if is_cae, tmp2 holds
588   // the value most recently fetched from addr. In this case, success
589   // is denoted by tmp2 matching expected.
590 
591   if (is_cae) {
592     __ mov(result, tmp2);
593   } else {
594     __ cset(result, Assembler::EQ);
595   }
596 }
597 
598 #undef __
599 
600 #ifdef COMPILER1
601 
602 #define __ ce->masm()->
603 
604 void ShenandoahBarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub) {
605   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
606   // At this point we know that marking is in progress.
607   // If do_load() is true then we have to emit the
608   // load of the previous value; otherwise it has already
609   // been loaded into _pre_val.
610 
611   __ bind(*stub->entry());
612 
613   assert(stub->pre_val()->is_register(), "Precondition.");
614 
615   Register pre_val_reg = stub->pre_val()->as_register();
616 
617   if (stub->do_load()) {

  1 /*
  2  * Copyright (c) 2018, 2022, Red Hat, Inc. All rights reserved.
  3  * Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
  4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  5  *
  6  * This code is free software; you can redistribute it and/or modify it
  7  * under the terms of the GNU General Public License version 2 only, as
  8  * published by the Free Software Foundation.
  9  *
 10  * This code is distributed in the hope that it will be useful, but WITHOUT
 11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 28 #include "gc/shenandoah/shenandoahBarrierSetAssembler.hpp"
 29 #include "gc/shenandoah/shenandoahForwarding.hpp"
 30 #include "gc/shenandoah/shenandoahHeap.inline.hpp"
 31 #include "gc/shenandoah/shenandoahHeapRegion.hpp"
 32 #include "gc/shenandoah/shenandoahRuntime.hpp"
 33 #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
 34 #include "gc/shenandoah/heuristics/shenandoahHeuristics.hpp"
 35 #include "gc/shenandoah/mode/shenandoahMode.hpp"
 36 #include "interpreter/interpreter.hpp"
 37 #include "interpreter/interp_masm.hpp"
 38 #include "runtime/javaThread.hpp"
 39 #include "runtime/sharedRuntime.hpp"
 40 #ifdef COMPILER1
 41 #include "c1/c1_LIRAssembler.hpp"
 42 #include "c1/c1_MacroAssembler.hpp"
 43 #include "gc/shenandoah/c1/shenandoahBarrierSetC1.hpp"
 44 #endif
 45 
 46 #define __ masm->
 47 
 48 void ShenandoahBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
 49                                                        Register src, Register dst, Register count, RegSet saved_regs) {
 50   if (is_oop) {
 51     bool dest_uninitialized = (decorators & IS_DEST_UNINITIALIZED) != 0;
 52     if ((ShenandoahSATBBarrier && !dest_uninitialized) || ShenandoahLoadRefBarrier) {
 53 
 54       Label done;
 55 
 56       // Avoid calling runtime if count == 0
 57       __ cbz(count, done);
 58 
 59       // Is GC active?
 60       Address gc_state(rthread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
 61       __ ldrb(rscratch1, gc_state);
 62       if (ShenandoahSATBBarrier && dest_uninitialized) {
 63         __ tbz(rscratch1, ShenandoahHeap::HAS_FORWARDED_BITPOS, done);
 64       } else {
 65         __ mov(rscratch2, ShenandoahHeap::HAS_FORWARDED | ShenandoahHeap::MARKING);
 66         __ tst(rscratch1, rscratch2);
 67         __ br(Assembler::EQ, done);
 68       }
 69 
 70       __ push(saved_regs, sp);
 71       if (UseCompressedOops) {
 72         __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::arraycopy_barrier_narrow_oop_entry), src, dst, count);
 73       } else {
 74         __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::arraycopy_barrier_oop_entry), src, dst, count);
 75       }
 76       __ pop(saved_regs, sp);
 77       __ bind(done);
 78     }
 79   }
 80 }
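// A minimal standalone sketch (not part of this file) of the gc_state test the
// prologue above emits: with an uninitialized destination there are no previous
// values for the SATB barrier to enqueue, so only the forwarded-objects case
// matters; otherwise active marking also forces the runtime call. The bit values
// below are assumptions chosen for the illustration.
#include <cstdint>

constexpr uint8_t kHasForwarded = 1 << 0;   // assumed stand-in for ShenandoahHeap::HAS_FORWARDED
constexpr uint8_t kMarking      = 1 << 1;   // assumed stand-in for ShenandoahHeap::MARKING

inline bool needs_arraycopy_barrier(uint8_t gc_state, bool dest_uninitialized) {
  uint8_t mask = dest_uninitialized ? kHasForwarded
                                    : static_cast<uint8_t>(kHasForwarded | kMarking);
  return (gc_state & mask) != 0;   // mirrors the tbz / tst + br.eq fast-path tests
}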
 81 
 82 void ShenandoahBarrierSetAssembler::arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
 83                                                        Register start, Register count, Register tmp, RegSet saved_regs) {
 84   if (ShenandoahCardBarrier && is_oop) {
 85     gen_write_ref_array_post_barrier(masm, decorators, start, count, tmp, saved_regs);
 86   }
 87 }
 88 
 89 void ShenandoahBarrierSetAssembler::shenandoah_write_barrier_pre(MacroAssembler* masm,
 90                                                                  Register obj,
 91                                                                  Register pre_val,
 92                                                                  Register thread,
 93                                                                  Register tmp,
 94                                                                  bool tosca_live,
 95                                                                  bool expand_call) {
 96   if (ShenandoahSATBBarrier) {
 97     satb_write_barrier_pre(masm, obj, pre_val, thread, tmp, rscratch1, tosca_live, expand_call);
 98   }
 99 }
100 
101 void ShenandoahBarrierSetAssembler::satb_write_barrier_pre(MacroAssembler* masm,
102                                                            Register obj,
103                                                            Register pre_val,
104                                                            Register thread,
105                                                            Register tmp1,
106                                                            Register tmp2,
107                                                            bool tosca_live,
108                                                            bool expand_call) {

297     }
298   } else {
299     assert(is_phantom, "only remaining strength");
300     assert(!is_narrow, "phantom access cannot be narrow");
301     __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom));
302   }
303   __ blr(lr);
304   __ mov(rscratch1, r0);
305   __ pop_call_clobbered_registers();
306   __ mov(r0, rscratch1);
307 
308   __ bind(not_cset);
309 
310   __ mov(result_dst, r0);
311   __ pop(to_save, sp);
312 
313   __ bind(heap_stable);
314   __ leave();
315 }
316 
317 //
318 // Arguments:
319 //
320 // Inputs:
321 //   src:        oop location to load from, might be clobbered
322 //
323 // Output:
324 //   dst:        oop loaded from src location
325 //
326 // Kill:
327 //   rscratch1 (scratch reg)
328 //
329 // Alias:
330 //   dst: rscratch1 (might use rscratch1 as temporary output register to avoid clobbering src)
331 //
332 void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
333                                             Register dst, Address src, Register tmp1, Register tmp2) {
334   // 1: non-reference load, no additional barrier is needed
335   if (!is_reference_type(type)) {
336     BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);

359     BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp2);
360   }
361 
362   // 3: apply keep-alive barrier if needed
363   if (ShenandoahBarrierSet::need_keep_alive_barrier(decorators, type)) {
364     __ enter(/*strip_ret_addr*/true);
365     __ push_call_clobbered_registers();
366     satb_write_barrier_pre(masm /* masm */,
367                            noreg /* obj */,
368                            dst /* pre_val */,
369                            rthread /* thread */,
370                            tmp1 /* tmp1 */,
371                            tmp2 /* tmp2 */,
372                            true /* tosca_live */,
373                            true /* expand_call */);
374     __ pop_call_clobbered_registers();
375     __ leave();
376   }
377 }
378 
379 void ShenandoahBarrierSetAssembler::store_check(MacroAssembler* masm, Register obj) {
380   assert(ShenandoahCardBarrier, "Should have been checked by caller");
381 
382   __ lsr(obj, obj, CardTable::card_shift());
383 
384   assert(CardTable::dirty_card_val() == 0, "must be");
385 
386   __ load_byte_map_base(rscratch1);
387 
388   if (UseCondCardMark) {
389     Label L_already_dirty;
390     __ ldrb(rscratch2, Address(obj, rscratch1));
391     __ cbz(rscratch2, L_already_dirty);
392     __ strb(zr, Address(obj, rscratch1));
393     __ bind(L_already_dirty);
394   } else {
395     __ strb(zr, Address(obj, rscratch1));
396   }
397 }
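// A minimal standalone sketch (not part of this file) of the card-marking
// arithmetic store_check above emits: shift the object address right by the
// card shift and dirty one byte in the card table. The card size is an
// assumption for the example; the biased byte_map_base mirrors
// load_byte_map_base above.
#include <cstdint>

constexpr unsigned kCardShift    = 9;   // assumed 512-byte cards
constexpr uint8_t  kDirtyCardVal = 0;   // matches the dirty_card_val() == 0 assert above

inline void dirty_card(uint8_t* byte_map_base, uintptr_t obj_addr) {
  // byte_map_base is biased so that indexing by (addr >> shift) selects the
  // card covering addr, which is what the lsr + strb sequence computes.
  byte_map_base[obj_addr >> kCardShift] = kDirtyCardVal;
}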
398 
399 void ShenandoahBarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
400                                              Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
401   bool on_oop = is_reference_type(type);
402   if (!on_oop) {
403     BarrierSetAssembler::store_at(masm, decorators, type, dst, val, tmp1, tmp2, tmp3);
404     return;
405   }
406 
407   // flatten object address if needed
408   if (dst.index() == noreg && dst.offset() == 0) {
409     if (dst.base() != tmp3) {
410       __ mov(tmp3, dst.base());
411     }
412   } else {
413     __ lea(tmp3, dst);
414   }
415 
416   shenandoah_write_barrier_pre(masm,
417                                tmp3 /* obj */,
418                                tmp2 /* pre_val */,
419                                rthread /* thread */,
420                                tmp1  /* tmp */,
421                                val != noreg /* tosca_live */,
422                                false /* expand_call */);
423 
424   if (val == noreg) {
425     BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), noreg, noreg, noreg, noreg);
426   } else {
427     // Barrier needs uncompressed oop for region cross check.

428     Register new_val = val;
429     if (UseCompressedOops) {
430       new_val = rscratch2;
431       __ mov(new_val, val);
432     }
433     BarrierSetAssembler::store_at(masm, decorators, type, Address(tmp3, 0), val, noreg, noreg, noreg);
434     if (ShenandoahCardBarrier) {
435       store_check(masm, tmp3);
436     }
437   }
438 
439 }
440 
441 void ShenandoahBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
442                                                                   Register obj, Register tmp, Label& slowpath) {
443   Label done;
444   // Resolve jobject
445   BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);
446 
447   // Check for null.
448   __ cbz(obj, done);
449 
450   assert(obj != rscratch2, "need rscratch2");
451   Address gc_state(jni_env, ShenandoahThreadLocalData::gc_state_offset() - JavaThread::jni_environment_offset());
452   __ lea(rscratch2, gc_state);
453   __ ldrb(rscratch2, Address(rscratch2));
454 
455   // Check for heap in evacuation phase
456   __ tbnz(rscratch2, ShenandoahHeap::EVACUATION_BITPOS, slowpath);

601   if (is_cae) {
602     // We're falling through to done to indicate success.  Success
603     // with is_cae is denoted by returning the value of expected as
604     // result.
605     __ mov(tmp2, expected);
606   }
607 
608   __ bind(done);
609   // At entry to done, the Z (EQ) flag is on iff the CAS
610   // operation was successful.  Additionally, if is_cae, tmp2 holds
611   // the value most recently fetched from addr. In this case, success
612   // is denoted by tmp2 matching expected.
613 
614   if (is_cae) {
615     __ mov(result, tmp2);
616   } else {
617     __ cset(result, Assembler::EQ);
618   }
619 }
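// A minimal standalone sketch (not part of this file) of the two result
// conventions handled above: plain CAS reports success as a boolean, while
// CAE returns the value witnessed at the address, so success means the
// witnessed value equals the expected one. Function names are illustrative only.
#include <atomic>
#include <cstdint>

inline uint64_t cas_result(std::atomic<uint64_t>& addr, uint64_t expected, uint64_t new_val) {
  uint64_t witnessed = expected;
  return addr.compare_exchange_strong(witnessed, new_val) ? 1 : 0;  // like cset(result, EQ)
}

inline uint64_t cae_result(std::atomic<uint64_t>& addr, uint64_t expected, uint64_t new_val) {
  uint64_t witnessed = expected;
  addr.compare_exchange_strong(witnessed, new_val);
  return witnessed;  // equals expected exactly when the exchange succeeded
}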
620 
621 void ShenandoahBarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
622                                                                      Register start, Register count, Register scratch, RegSet saved_regs) {
623   assert(ShenandoahCardBarrier, "Should have been checked by caller");
624 
625   Label L_loop, L_done;
626   const Register end = count;
627 
628   // Zero count? Nothing to do.
629   __ cbz(count, L_done);
630 
631   // end = start + count << LogBytesPerHeapOop
632   // last element address to make inclusive
633   __ lea(end, Address(start, count, Address::lsl(LogBytesPerHeapOop)));
634   __ sub(end, end, BytesPerHeapOop);
635   __ lsr(start, start, CardTable::card_shift());
636   __ lsr(end, end, CardTable::card_shift());
637 
638   // number of bytes to copy
639   __ sub(count, end, start);
640 
641   __ load_byte_map_base(scratch);
642   __ add(start, start, scratch);
643   __ bind(L_loop);
644   __ strb(zr, Address(start, count));
645   __ subs(count, count, 1);
646   __ br(Assembler::GE, L_loop);
647   __ bind(L_done);
648 }
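// A minimal standalone sketch (not part of this file) of the card range the
// arraycopy post barrier above dirties: every card from the one covering the
// first copied element to the one covering the last element, inclusive. The
// constants are assumptions for the example (512-byte cards, uncompressed
// 8-byte oops).
#include <cstddef>
#include <cstdint>
#include <cstring>

constexpr unsigned kArrayCardShift  = 9;
constexpr unsigned kBytesPerHeapOop = 8;

inline void dirty_card_range(uint8_t* byte_map_base, uintptr_t start, size_t count) {
  if (count == 0) return;                                  // mirrors the cbz(count, L_done) fast path
  uintptr_t last       = start + count * kBytesPerHeapOop - kBytesPerHeapOop;
  uintptr_t first_card = start >> kArrayCardShift;
  uintptr_t last_card  = last  >> kArrayCardShift;
  // Dirty every card in [first_card, last_card], matching the strb loop above.
  std::memset(byte_map_base + first_card, 0, last_card - first_card + 1);
}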
649 
650 #undef __
651 
652 #ifdef COMPILER1
653 
654 #define __ ce->masm()->
655 
656 void ShenandoahBarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub) {
657   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
658   // At this point we know that marking is in progress.
659   // If do_load() is true then we have to emit the
660   // load of the previous value; otherwise it has already
661   // been loaded into _pre_val.
662 
663   __ bind(*stub->entry());
664 
665   assert(stub->pre_val()->is_register(), "Precondition.");
666 
667   Register pre_val_reg = stub->pre_val()->as_register();
668 
669   if (stub->do_load()) {