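// Non-PRODUCT pretty-printer output for the nmethod entry barrier emitted at
// method entry: the per-thread guard value is compared against the disarmed
// value; when they match, execution jumps straight to fast_entry, otherwise
// the entry barrier stub is called first.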
    st->print("\n\t");
    st->print("cmpl [r15_thread + #disarmed_guard_value_offset], #disarmed_guard_value\t");
    st->print("\n\t");
    st->print("je fast_entry\t");
    st->print("\n\t");
    st->print("call #nmethod_entry_barrier_stub\t");
    st->print("\n\tfast_entry:");
  }
  st->cr();
}
#endif

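// Emits the C2 method prolog: an optional class-initialization barrier, the
// verified entry (stack bang plus frame construction), and, if the method
// uses a constant table, the table base offset.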
void MachPrologNode::emit(C2_MacroAssembler *masm, PhaseRegAlloc *ra_) const {
  Compile* C = ra_->C;

  int framesize = C->output()->frame_size_in_bytes();
  int bangsize = C->output()->bang_size_in_bytes();

  if (C->clinit_barrier_on_entry()) {
    assert(VM_Version::supports_fast_class_init_checks(), "sanity");
    assert(!C->method()->holder()->is_not_initialized() || C->do_clinit_barriers(), "initialization should have been started");

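    // Fast path: clinit_barrier jumps to L_skip_barrier once the holder
    // class is initialized (or is being initialized by the current thread);
    // otherwise we fall through and tail-call the handle-wrong-method stub,
    // which re-resolves the call.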
    Label L_skip_barrier;
    Register klass = rscratch1;

    __ mov_metadata(klass, C->method()->holder()->constant_encoding());
    __ clinit_barrier(klass, r15_thread, &L_skip_barrier /*L_fast_path*/);

    __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub())); // slow path

    __ bind(L_skip_barrier);
  }

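  // Bang the stack when a bang is required, then build the frame. The final
  // argument marks compiled runtime stubs (C->stub_function() != nullptr).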
  __ verified_entry(framesize, C->output()->need_stack_bang(bangsize) ? bangsize : 0, false, C->stub_function() != nullptr);

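  // Record the offset at which the frame is fully set up; the runtime relies
  // on this when walking stacks through partially constructed frames.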
  C->output()->set_frame_complete(__ offset());

  if (C->has_mach_constant_base_node()) {
    // NOTE: We set the table base offset here because users might be
    // emitted before MachConstantBaseNode.
    ConstantTable& constant_table = C->output()->constant_table();

// ... (intervening code elided) ...

    if ($primary) {
      __ xorptr(Rrdi, Rrdi);
    }
    __ bind(miss);
  %}

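  // Emitted ahead of calls to avoid AVX <-> SSE transition penalties. The
  // debug-only offset bookkeeping asserts that the bytes emitted here match
  // clear_avx_size(), since C2 relies on exact size estimates when laying
  // out code.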
  enc_class clear_avx %{
    debug_only(int off0 = __ offset());
    if (generate_vzeroupper(Compile::current())) {
      // Clear upper bits of YMM registers when current compiled code uses
      // wide vectors to avoid AVX <-> SSE transition penalty during call.
      __ vzeroupper();
    }
    debug_only(int off1 = __ offset());
    assert(off1 - off0 == clear_avx_size(), "correct size prediction");
  %}

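  // Call into the C runtime through r10. mov64 embeds the target as an
  // absolute 64-bit immediate with no relocation; when writing to the
  // startup code cache, the target is instead materialized with lea from a
  // RuntimeAddress so a runtime_call relocation is recorded and the cached
  // code can be relocated when it is loaded.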
  enc_class Java_To_Runtime(method meth) %{
    if (SCCache::is_on_for_write()) {
      // Create a runtime_call_type relocation when caching code
      __ lea(r10, RuntimeAddress((address)$meth$$method));
    } else {
      // No relocation needed
      __ mov64(r10, (int64_t) $meth$$method);
    }
    __ call(r10);
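    // The nop after the call marks the call site; among other things it
    // gives deoptimization and continuation thawing a patchable return
    // point (see MacroAssembler::post_call_nop).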
    __ post_call_nop();
  %}

  enc_class Java_Static_Call(method meth)
  %{
    // JAVA STATIC CALL
    // CALL to fixup routine. Fixup routine uses ScopeDesc info to
    // determine who we intended to call.
    if (!_method) {
      __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, $meth$$method)));
    } else if (_method->intrinsic_id() == vmIntrinsicID::_ensureMaterializedForStackWalk) {
      // The NOP here is purely to ensure that eliding a call to
      // JVM_EnsureMaterializedForStackWalk doesn't change the code size.
      __ addr_nop_5();
      __ block_comment("call JVM_EnsureMaterializedForStackWalk (elided)");
    } else {
      int method_index = resolved_method_index(masm);
      RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
                                                  : static_call_Relocation::spec(method_index);