    st->print("\n\t");
    st->print("cmpl [r15_thread + #disarmed_guard_value_offset], #disarmed_guard_value\t");
    st->print("\n\t");
    st->print("je fast_entry\t");
    st->print("\n\t");
    st->print("call #nmethod_entry_barrier_stub\t");
    st->print("\n\tfast_entry:");
  }
  st->cr();
}
#endif
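
// Note on the pseudo-assembly printed above: the nmethod entry barrier
// compares the per-thread guard field at a fixed offset from r15_thread
// against the disarmed guard value; when they match, execution jumps
// straight to fast_entry, otherwise the entry-barrier stub runs first.
// (Descriptive summary only; the actual instructions are emitted by the
// GC's barrier set assembler, not by this format routine.)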

void MachPrologNode::emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const {
  Compile* C = ra_->C;

  int framesize = C->output()->frame_size_in_bytes();
  int bangsize = C->output()->bang_size_in_bytes();

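  // Fast class initialization check: before entering the method, verify
  // that the holder class is initialized for the current thread. On the
  // slow path we jump to the handle-wrong-method stub, which re-resolves
  // the call and lets initialization proceed. (See clinit_barrier() in
  // the x86 macro assembler for the exact check against r15_thread; this
  // comment is a summary, not the emitted code.)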
  if (C->clinit_barrier_on_entry()) {
    assert(VM_Version::supports_fast_class_init_checks(), "sanity");
    assert(!C->method()->holder()->is_not_initialized() || C->do_clinit_barriers(), "initialization should have been started");

    Label L_skip_barrier;
    Register klass = rscratch1;

    __ mov_metadata(klass, C->method()->holder()->constant_encoding());
    __ clinit_barrier(klass, r15_thread, &L_skip_barrier /*L_fast_path*/);

    __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub())); // slow path

    __ bind(L_skip_barrier);
  }

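  // verified_entry() emits the stack-overflow bang (suppressed by passing
  // 0 when no bang is needed for this frame size) and builds the frame.
  // The final argument marks stub compilations (C->stub_function() is
  // non-null); see C2_MacroAssembler::verified_entry for the details.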
  __ verified_entry(framesize, C->output()->need_stack_bang(bangsize) ? bangsize : 0, false, C->stub_function() != nullptr);

  C->output()->set_frame_complete(__ offset());

  if (C->has_mach_constant_base_node()) {
    // NOTE: We set the table base offset here because users might be
    // emitted before MachConstantBaseNode.
    ConstantTable& constant_table = C->output()->constant_table();
...

    if ($primary) {
      __ xorptr(Rrdi, Rrdi);
    }
    __ bind(miss);
  %}

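  // clear_avx guards calls against the AVX <-> SSE transition penalty by
  // emitting vzeroupper when the compiled code uses wide vectors. The
  // emitted byte count must match clear_avx_size() exactly, since
  // instruction sizes are precomputed when sizing the code buffer; the
  // debug-only assert below verifies that prediction.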
  enc_class clear_avx %{
    debug_only(int off0 = __ offset());
    if (generate_vzeroupper(Compile::current())) {
      // Clear upper bits of YMM registers when current compiled code uses
      // wide vectors to avoid AVX <-> SSE transition penalty during call.
      __ vzeroupper();
    }
    debug_only(int off1 = __ offset());
    assert(off1 - off0 == clear_avx_size(), "correct size prediction");
  %}

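  // Java_To_Runtime materializes the runtime target in r10 and calls
  // through the register. When the startup code cache is being written,
  // the address is loaded with lea from a RuntimeAddress so that a
  // runtime_call_type relocation is recorded and the cached code stays
  // relocatable; otherwise a raw 64-bit immediate suffices.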
  enc_class Java_To_Runtime(method meth) %{
    if (SCCache::is_on_for_write()) {
      // Create a runtime_call_type relocation when caching code.
      __ lea(r10, RuntimeAddress((address)$meth$$method));
    } else {
      // No relocation needed.
      __ mov64(r10, (int64_t) $meth$$method);
    }
    __ call(r10);
    __ post_call_nop();
  %}

  enc_class Java_Static_Call(method meth)
  %{
    // JAVA STATIC CALL
    // CALL to fixup routine. Fixup routine uses ScopeDesc info to
    // determine whom we intended to call.
    if (!_method) {
      __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, $meth$$method)));
    } else if (_method->intrinsic_id() == vmIntrinsicID::_ensureMaterializedForStackWalk) {
      // The NOP here is purely to ensure that eliding a call to
      // JVM_EnsureMaterializedForStackWalk doesn't change the code size.
      __ addr_nop_5();
      __ block_comment("call JVM_EnsureMaterializedForStackWalk (elided)");
    } else {
      int method_index = resolved_method_index(masm);
      RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
                                                  : static_call_Relocation::spec(method_index);