< prev index next >

src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.hpp

Print this page

 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  *
 25  */
 26 
 27 #ifndef CPU_AARCH64_GC_SHENANDOAH_SHENANDOAHBARRIERSETASSEMBLER_AARCH64_HPP
 28 #define CPU_AARCH64_GC_SHENANDOAH_SHENANDOAHBARRIERSETASSEMBLER_AARCH64_HPP
 29 
 30 #include "asm/macroAssembler.hpp"
 31 #include "gc/shared/barrierSetAssembler.hpp"
 32 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 33 #ifdef COMPILER1
 34 class LIR_Assembler;
 35 class ShenandoahPreBarrierStub;
 36 class ShenandoahLoadReferenceBarrierStub;
 37 class StubAssembler;
 38 #endif



 39 class StubCodeGenerator;
 40 



























































 // AArch64 assembler support for Shenandoah GC barriers, plugged into the
 // shared BarrierSetAssembler framework.  Declarations only; the
 // implementations live in the corresponding .cpp file.
 41 class ShenandoahBarrierSetAssembler: public BarrierSetAssembler {

 42 private:
 43 
   // SATB (snapshot-at-the-beginning) pre-write barrier: records the previous
   // value (pre_val) of a reference field for the concurrent marker.
   // NOTE(review): tosca_live/expand_call presumably control caller-save
   // register handling and inline expansion of the runtime call — confirm
   // against the implementation.
 44   void satb_barrier(MacroAssembler* masm,
 45                     Register obj,
 46                     Register pre_val,
 47                     Register thread,
 48                     Register tmp1,
 49                     Register tmp2,
 50                     bool tosca_live,
 51                     bool expand_call);
 52 
   // Post-write card-table barrier for obj.
 53   void card_barrier(MacroAssembler* masm, Register obj);
 54 
   // Replace a possibly forwarded oop in dst with its forwardee; the
   // _not_null variant assumes dst is known non-null.
 55   void resolve_forward_pointer(MacroAssembler* masm, Register dst, Register tmp = noreg);
 56   void resolve_forward_pointer_not_null(MacroAssembler* masm, Register dst, Register tmp = noreg);
   // Load-reference barrier applied to dst after a load from load_addr;
   // decorators select the barrier variant.
 57   void load_reference_barrier(MacroAssembler* masm, Register dst, Address load_addr, DecoratorSet decorators);
 58 
   // Post barrier over the oop array [start, start + count) after arraycopy.
 59   void gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
 60                                         Register start, Register count,
 61                                         Register scratch);
 62 
 63 public:
   // nmethod entry barriers patch both instructions and data concurrently.
 64   virtual NMethodPatchingType nmethod_patching_type() { return NMethodPatchingType::conc_instruction_and_data_patch; }
 65 
   // C1 (client compiler) stub and runtime-stub generation entry points.
 66 #ifdef COMPILER1
 67   void gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub);
 68   void gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub);
 69   void generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm);
 70   void generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, DecoratorSet decorators);
 71 #endif
 72 
   // Overrides of the shared BarrierSetAssembler access hooks.
 73   virtual void arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
 74                                   Register src, Register dst, Register count, RegSet saved_regs);
 75   virtual void arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
 76                                   Register start, Register count, Register tmp);
 77   virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
 78                        Register dst, Address src, Register tmp1, Register tmp2);
 79   virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
 80                         Address dst, Register val, Register tmp1, Register tmp2, Register tmp3);
 81   virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
 82                                              Register obj, Register tmp, Label& slowpath);








 83 #ifdef COMPILER2










   // Fast-path resolution of a weak handle for C2; branches to slow_path
   // when the fast path cannot complete.
 84   virtual void try_resolve_weak_handle_in_c2(MacroAssembler* masm, Register obj, Register tmp, Label& slow_path);
 85 #endif
   // Atomic compare-and-exchange of an oop at addr.  NOTE(review): is_cae
   // presumably selects compare-and-exchange (old value in result) vs.
   // compare-and-set (boolean in result) — confirm in the implementation.
 86   void cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
 87                    bool acquire, bool release, bool is_cae, Register result);
 88 };
 89 
 90 #endif // CPU_AARCH64_GC_SHENANDOAH_SHENANDOAHBARRIERSETASSEMBLER_AARCH64_HPP

 19  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 20  *
 21  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 22  * or visit www.oracle.com if you need additional information or have any
 23  * questions.
 24  *
 25  */
 26 
 27 #ifndef CPU_AARCH64_GC_SHENANDOAH_SHENANDOAHBARRIERSETASSEMBLER_AARCH64_HPP
 28 #define CPU_AARCH64_GC_SHENANDOAH_SHENANDOAHBARRIERSETASSEMBLER_AARCH64_HPP
 29 
 30 #include "asm/macroAssembler.hpp"
 31 #include "gc/shared/barrierSetAssembler.hpp"
 32 #include "gc/shenandoah/shenandoahBarrierSet.hpp"
 33 #ifdef COMPILER1
 34 class LIR_Assembler;
 35 class ShenandoahPreBarrierStub;
 36 class ShenandoahLoadReferenceBarrierStub;
 37 class StubAssembler;
 38 #endif
 39 #ifdef COMPILER2
 40 class MachNode;
 41 #endif // COMPILER2
 42 class StubCodeGenerator;
 43 
 44 // Barriers on aarch64 are implemented with a test-and-branch immediate instruction.
 45 // This immediate has a max delta of 32K. Because of this the branch is implemented with
 46 // a small jump, as follows:
 47 //      __ tbz(gcs, bits_to_check, L_short_branch);
 48 //      __ b(*stub->entry());
 49 //      __ bind(L_short_branch);
 50 //
 51 // If we can guarantee that the *stub->entry() label is within 32K we can replace the above
 52 // code with:
 53 //      __ tbnz(gcs, bits_to_check, *stub->entry());
 54 //
 55 // From the branch shortening part of PhaseOutput we get a pessimistic code size that the code
 56 // will not grow beyond.
 57 //
 58 // The stub objects are created and registered when the barriers are emitted. The decision
 59 // between emitting the long branch or the test and branch is done at this point and uses the
 60 // pessimistic code size from branch shortening.
 61 //
 62 // After the code has been emitted the barrier set will emit all the stubs. When the stubs are
 63 // emitted we know the real code size. Because of this the trampoline jump can be skipped in
 64 // favour of emitting the stub directly if it does not interfere with the next trampoline stub.
 65 // (With respect to test and branch distance)
 66 //
 67 // The algorithm for emitting the load barrier branches and stubs now has three versions
 68 // depending on the distance between the barrier and the stub.
 69 // Version 1: Not Reachable with a test-and-branch immediate
 70 // Version 2: Reachable with a test-and-branch immediate via trampoline
 71 // Version 3: Reachable with a test-and-branch immediate without trampoline
 72 //
 73 //     +--------------------- Code ----------------------+
 74 //     |                      ***                        |
 75 //     | tbz(gcs, bits_to_check, L_short_branch);        |
 76 //     | b(stub1)                                        | (Version 1)
 77 //     | bind(L_short_branch);                           |
 78 //     |                      ***                        |
 79 //     | tbnz(gcs, bits_to_check, tramp)                 | (Version 2)
 80 //     |                      ***                        |
 81 //     | tbnz(gcs, bits_to_check, stub3)                 | (Version 3)
 82 //     |                      ***                        |
 83 //     +--------------------- Stub ----------------------+
 84 //     | tramp: b(stub2)                                 | (Trampoline slot)
 85 //     | stub3:                                          |
 86 //     |                  * Stub Code*                   |
 87 //     | stub1:                                          |
 88 //     |                  * Stub Code*                   |
 89 //     | stub2:                                          |
 90 //     |                  * Stub Code*                   |
 91 //     +-------------------------------------------------+
 92 //
 93 //  Version 1: Is emitted if the pessimistic distance between the branch instruction and the current
 94 //             trampoline slot cannot fit in a test and branch immediate.
 95 //
 96 //  Version 2: Is emitted if the distance between the branch instruction and the current trampoline
 97 //             slot can fit in a test and branch immediate. But emitting the stub directly would
 98 //             interfere with the next trampoline.
 99 //
100 //  Version 3: Same as version two but emitting the stub directly (skipping the trampoline) does not
101 //             interfere with the next trampoline.
102 //
// AArch64 assembler support for Shenandoah GC barriers, plugged into the
// shared BarrierSetAssembler framework.  Declarations only; the
// implementations live in the corresponding .cpp file.
103 class ShenandoahBarrierSetAssembler: public BarrierSetAssembler {
    // The CAS slow-path stub needs access to the private helpers below.
104   friend class ShenandoahCASBarrierSlowStub;
105 private:
106 
    // SATB (snapshot-at-the-beginning) pre-write barrier: records the
    // previous value (pre_val) of a reference field for the concurrent
    // marker.  NOTE(review): tosca_live/expand_call presumably control
    // caller-save register handling and inline expansion of the runtime
    // call — confirm against the implementation.
107   void satb_barrier(MacroAssembler* masm,
108                     Register obj,
109                     Register pre_val,
110                     Register thread,
111                     Register tmp1,
112                     Register tmp2,
113                     bool tosca_live,
114                     bool expand_call);
115 
    // Post-write card-table barrier for obj.
116   void card_barrier(MacroAssembler* masm, Register obj);
117 
    // Replace a possibly forwarded oop in dst with its forwardee; the
    // _not_null variant assumes dst is known non-null.
118   void resolve_forward_pointer(MacroAssembler* masm, Register dst, Register tmp = noreg);
119   void resolve_forward_pointer_not_null(MacroAssembler* masm, Register dst, Register tmp = noreg);
    // Load-reference barrier applied to dst after a load from load_addr;
    // decorators select the barrier variant.
120   void load_reference_barrier(MacroAssembler* masm, Register dst, Address load_addr, DecoratorSet decorators);
121 
    // Post barrier over the oop array [start, start + count) after arraycopy.
122   void gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
123                                         Register start, Register count,
124                                         Register scratch);
125 
126 public:
    // nmethod entry barriers patch both instructions and data concurrently.
127   virtual NMethodPatchingType nmethod_patching_type() { return NMethodPatchingType::conc_instruction_and_data_patch; }
128 
    // Atomic compare-and-exchange of an oop at addr.  NOTE(review): is_cae
    // presumably selects compare-and-exchange (old value in result) vs.
    // compare-and-set (boolean in result) — confirm in the implementation.
129   void cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
130                    bool acquire, bool release, bool is_cae, Register result);





    // Overrides of the shared BarrierSetAssembler access hooks.
131   virtual void arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
132                                   Register src, Register dst, Register count, RegSet saved_regs);
133   virtual void arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
134                                   Register start, Register count, Register tmp);
135   virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
136                        Register dst, Address src, Register tmp1, Register tmp2);
137   virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
138                         Address dst, Register val, Register tmp1, Register tmp2, Register tmp3);
139   virtual void try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
140                                              Register obj, Register tmp, Label& slowpath);
141 
    // C1 (client compiler) stub and runtime-stub generation entry points.
142 #ifdef COMPILER1
143   void gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub);
144   void gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub);
145   void generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm);
146   void generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, DecoratorSet decorators);
147 #endif
148 
149 #ifdef COMPILER2
150   // Entry points from Matcher
    // Emit a C2 access plus its Shenandoah barriers for the given mach node.
    // NOTE(review): the *_narrow flags presumably indicate compressed-oop
    // operands — confirm against the matcher rules.
151   void store_c2(const MachNode* node, MacroAssembler* masm, Address dst, bool dst_narrow, Register src,
152       bool src_narrow, Register tmp);
153   void compare_and_set_c2(const MachNode* node, MacroAssembler* masm, Register res, Register addr, Register oldval,
154       Register newval, Register tmp, bool exchange, bool maybe_null, bool narrow, bool weak);
155   void get_and_set_c2(const MachNode* node, MacroAssembler* masm, Register preval, Register newval,
156       Register addr, Register tmp);
157   void load_c2(const MachNode* node, MacroAssembler* masm, Register dst, Address addr);
    // Emit the gc-state test dispatching to slow_stub, using the
    // test-and-branch/trampoline emission scheme described in the file
    // comment above the class.
158   void gc_state_check_c2(MacroAssembler* masm, Register rscratch, const unsigned char test_state, BarrierStubC2* slow_stub);
159   void card_barrier_c2(const MachNode* node, MacroAssembler* masm, Address addr);
    // Fast-path resolution of a weak handle for C2; branches to slow_path
    // when the fast path cannot complete.
160   virtual void try_resolve_weak_handle_in_c2(MacroAssembler* masm, Register obj, Register tmp, Label& slow_path);
161 #endif


162 };
163 
164 #endif // CPU_AARCH64_GC_SHENANDOAH_SHENANDOAHBARRIERSETASSEMBLER_AARCH64_HPP
< prev index next >