< prev index next >

src/hotspot/cpu/x86/gc/shenandoah/shenandoahBarrierSetAssembler_x86.cpp

Print this page
@@ -32,10 +32,11 @@
  #include "gc/shenandoah/shenandoahHeap.inline.hpp"
  #include "gc/shenandoah/shenandoahHeapRegion.hpp"
  #include "gc/shenandoah/shenandoahRuntime.hpp"
  #include "gc/shenandoah/shenandoahThreadLocalData.hpp"
  #include "interpreter/interpreter.hpp"
+ #include "nativeInst_x86.hpp"
  #include "runtime/javaThread.hpp"
  #include "runtime/sharedRuntime.hpp"
  #include "utilities/macros.hpp"
  #ifdef COMPILER1
  #include "c1/c1_LIRAssembler.hpp"

@@ -1184,16 +1185,75 @@
  #define __ masm.
  
  void ShenandoahBarrierStubC2::enter_if_gc_state(MacroAssembler& masm, const char test_state) {
    Assembler::InlineSkippedInstructionsCounter skip_counter(&masm);
  
-   Address gc_state_fast(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_fast_offset()));
-   __ testb(gc_state_fast, ShenandoahThreadLocalData::gc_state_to_fast(test_state));
-   __ jcc(Assembler::notZero, *entry());
+   // Emit the unconditional branch in the first version of the method.
+   // Let the rest of runtime figure out how to manage it.
+   // NOTE: maybe_short = false pins the jump to its long (5-byte) encoding so
+   // the runtime can later hot-patch this site to a same-size 5-byte NOP
+   // (see insert_5_byte_nop) and back to a jump (see patch_nop_to_branch).
+   __ relocate(barrier_Relocation::spec());
+   __ jmp(*entry(), /* maybe_short = */ false);
+ 
+ #ifdef ASSERT
+   // Debug-only sanity check: if the jump above was NOP-ed out, control falls
+   // through to here, and that is only correct while gc_state is zero (idle).
+   Address gc_state(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
+   __ testb(gc_state, 0xFF);
+   __ jccb(Assembler::zero, *continuation());
+   __ hlt(); // Correctness bug: barrier is NOP-ed, but heap is NOT IDLE
+ #endif
+   // TODO: When barriers are consistently turned off at the end of the cycle, assert that barrier is NOP-ed.
+ 
    __ bind(*continuation());
  }
  
+ // Decode the barrier-site jump at pc and return the stub address it targets.
+ // Only valid while the site still holds its initial (unpatched) jump;
+ // asserts otherwise. The assert is folded around the decoded instruction so
+ // no local is left unused in product builds where assert() compiles away.
+ address ShenandoahBarrierSetAssembler::parse_stub_address(address pc) {
+   assert(nativeInstruction_at(pc)->is_jump(), "Initial code version: GC barrier fastpath must be a jump");
+   NativeJump* jmp = nativeJump_at(pc);
+   return jmp->jump_destination();
+ }
+ 
+ // Overwrite the 5 bytes at pc with the canonical 5-byte NOP
+ // (0F 1F 44 00 00 = nopl 0x0(%rax,%rax,1)), then flush the instruction
+ // cache for the patched range. File-local helper, hence static.
+ // NOTE(review): the five byte stores are not one atomic write; presumably
+ // patching happens while no thread can execute this site — confirm callers.
+ static void insert_5_byte_nop(address pc) {
+   static const unsigned char nop5[] = { 0x0F, 0x1F, 0x44, 0x00, 0x00 };
+   for (int i = 0; i < 5; i++) {
+     *(pc + i) = nop5[i];
+   }
+   ICache::invalidate_range(pc, 5);
+ }
+ 
+ // Return true iff the 5 bytes at pc are exactly the 5-byte NOP pattern
+ // (0F 1F 44 00 00) that insert_5_byte_nop() emits. File-local helper,
+ // hence static.
+ static bool is_5_byte_nop(address pc) {
+   static const unsigned char nop5[] = { 0x0F, 0x1F, 0x44, 0x00, 0x00 };
+   for (int i = 0; i < 5; i++) {
+     if (*(pc + i) != nop5[i]) {
+       return false;
+     }
+   }
+   return true;
+ }
+ 
+ // assert() wrapper that, on failure, dumps the 5 instruction bytes at pc
+ // to help diagnose an unexpected patch state. File-local helper, hence
+ // static; compiles to nothing in product builds.
+ static void check_at(bool cond, address pc, const char* msg) {
+   assert(cond, "%s: at PC " PTR_FORMAT ": %02x%02x%02x%02x%02x",
+          msg, p2i(pc), *(pc + 0), *(pc + 1), *(pc + 2), *(pc + 3), *(pc + 4));
+ }
+ 
+ // Disable the barrier site at pc: replace its entry jump with a 5-byte NOP.
+ // Idempotent — if the site is no longer a jump, it must already be the NOP
+ // pattern; verify that and leave it untouched.
+ void ShenandoahBarrierSetAssembler::patch_branch_to_nop(address pc) {
+   if (!nativeInstruction_at(pc)->is_jump()) {
+     check_at(is_5_byte_nop(pc), pc, "Should already be nop");
+     return;
+   }
+   insert_5_byte_nop(pc);
+ }
+ 
+ // Re-enable the barrier site at pc: replace the 5-byte NOP with a jump to
+ // stub_addr. Idempotent — if the site is already a jump, verify it targets
+ // the same stub and leave it untouched.
+ void ShenandoahBarrierSetAssembler::patch_nop_to_branch(address pc, address stub_addr) {
+   if (is_5_byte_nop(pc)) {
+     NativeJump::insert(pc, stub_addr);
+     return;
+   }
+   NativeInstruction* insn = nativeInstruction_at(pc);
+   check_at(insn->is_jump(), pc, "Should already be jump");
+   check_at(nativeJump_at(pc)->jump_destination() == stub_addr, pc, "Jump should be to the same address");
+ }
+ 
  void ShenandoahBarrierStubC2::emit_code(MacroAssembler& masm) {
    assert(_needs_keep_alive_barrier || _needs_load_ref_barrier, "Why are you here?");
  
    __ bind(*entry());
  

@@ -1265,16 +1325,14 @@
    Address index(r15_thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
    Address buffer(r15_thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset()));
  
    Label L_fast, L_done;
  
-   // If another barrier is enabled as well, do a runtime check for a specific barrier.
-   if (_needs_load_ref_barrier) {
-     Address gc_state(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
-     __ testb(gc_state, ShenandoahHeap::MARKING);
-     __ jccb(Assembler::zero, L_done);
-   }
+   // Hotpatched GC checks only care about idle/non-idle state, so we need to check again here.
+   Address gc_state(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
+   __ testb(gc_state, ShenandoahHeap::MARKING);
+   __ jccb(Assembler::zero, L_done);
  
    // Check if buffer is already full. Go slow, if so.
    __ movptr(tmp1, index);
    __ testptr(tmp1, tmp1);
    __ jccb(Assembler::notZero, L_fast);

@@ -1312,16 +1370,14 @@
  }
  
  void ShenandoahBarrierStubC2::lrb(MacroAssembler& masm, Register obj, Address addr, Register tmp) {
    Label L_done, L_slow;
  
-   // If another barrier is enabled as well, do a runtime check for a specific barrier.
-   if (_needs_keep_alive_barrier) {
-     Address gc_state(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
-     __ testb(gc_state, ShenandoahHeap::HAS_FORWARDED | (_needs_load_ref_weak_barrier ? ShenandoahHeap::WEAK_ROOTS : 0));
-     __ jccb(Assembler::zero, L_done);
-   }
+   // Hotpatched GC checks only care about idle/non-idle state, so we need a check here.
+   Address gc_state(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
+   __ testb(gc_state, ShenandoahHeap::HAS_FORWARDED | (_needs_load_ref_weak_barrier ? ShenandoahHeap::WEAK_ROOTS : 0));
+   __ jccb(Assembler::zero, L_done);
  
    // If weak references are being processed, weak/phantom loads need to go slow,
    // regardless of their cset status.
    if (_needs_load_ref_weak_barrier) {
      Address gc_state(r15_thread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));

@@ -1492,10 +1548,7 @@
    assert_different_registers(tmp, addr.base());
    assert_different_registers(tmp, addr.index());
    return tmp;
  }
  
- void ShenandoahBarrierStubC2::post_init(int offset) {
-   // Do nothing.
- }
  #undef __
  #endif
< prev index next >