src/hotspot/cpu/x86/stubGenerator_x86_64.cpp (old version)

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/vmIntrinsics.hpp"
  28 #include "compiler/oopMap.hpp"
  29 #include "gc/shared/barrierSet.hpp"
  30 #include "gc/shared/barrierSetAssembler.hpp"
  31 #include "gc/shared/barrierSetNMethod.hpp"
  32 #include "gc/shared/gc_globals.hpp"
  33 #include "memory/universe.hpp"
  34 #include "prims/jvmtiExport.hpp"
  35 #include "prims/upcallLinker.hpp"
  36 #include "runtime/arguments.hpp"
  37 #include "runtime/javaThread.hpp"
  38 #include "runtime/sharedRuntime.hpp"
  39 #include "runtime/stubRoutines.hpp"
  40 #include "stubGenerator_x86_64.hpp"
  41 #ifdef COMPILER2
  42 #include "opto/runtime.hpp"
  43 #include "opto/c2_globals.hpp"
  44 #endif
  45 #if INCLUDE_JVMCI
  46 #include "jvmci/jvmci_globals.hpp"
  47 #endif
  48 
  49 // For a more detailed description of the stub routine structure
  50 // see the comment in stubRoutines.hpp
  51 
  52 #define __ _masm->
  53 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
  54 
  55 #ifdef PRODUCT
  56 #define BLOCK_COMMENT(str) /* nothing */
  57 #else
  58 #define BLOCK_COMMENT(str) __ block_comment(str)
  59 #endif // PRODUCT
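Reviewer note: TIMES_OOP picks the address scale for oop-array element access: 4-byte strides with compressed oops, 8-byte strides otherwise. A minimal standalone C++ sketch of the same address arithmetic, where UseCompressedOops is a plain variable standing in for the VM flag:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Stand-in for the VM flag; in HotSpot this is a runtime flag, not a constant.
    bool UseCompressedOops = true;

    // Element address of an oop array slot: base + index * (4 or 8), mirroring
    // Address::times_4 / Address::times_8 as selected by TIMES_OOP.
    uintptr_t oop_element_address(uintptr_t base, size_t index) {
      size_t scale = UseCompressedOops ? 4 : 8;
      return base + index * scale;
    }

    int main() {
      std::printf("%#zx\n", (size_t)oop_element_address(0x1000, 3));  // 0x100c with narrow oops
      UseCompressedOops = false;
      std::printf("%#zx\n", (size_t)oop_element_address(0x1000, 3));  // 0x1018 with full-width oops
      return 0;
    }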

 285   __ BIND(loop);
 286   __ movptr(rax, Address(c_rarg2, 0));// get parameter
 287   __ addptr(c_rarg2, wordSize);       // advance to next parameter
 288   __ decrementl(c_rarg1);             // decrement counter
 289   __ push(rax);                       // pass parameter
 290   __ jcc(Assembler::notZero, loop);
 291 
 292   // call Java function
 293   __ BIND(parameters_done);
 294   __ movptr(rbx, method);             // get Method*
 295   __ movptr(c_rarg1, entry_point);    // get entry_point
 296   __ mov(r13, rsp);                   // set sender sp
 297   BLOCK_COMMENT("call Java function");
 298   __ call(c_rarg1);
 299 
 300   BLOCK_COMMENT("call_stub_return_address:");
 301   return_address = __ pc();
 302 
 303   // store result depending on type (everything that is not
 304   // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 305   __ movptr(c_rarg0, result);
 306   Label is_long, is_float, is_double, exit;
 307   __ movl(c_rarg1, result_type);
 308   __ cmpl(c_rarg1, T_OBJECT);
 309   __ jcc(Assembler::equal, is_long);
 310   __ cmpl(c_rarg1, T_LONG);
 311   __ jcc(Assembler::equal, is_long);
 312   __ cmpl(c_rarg1, T_FLOAT);
 313   __ jcc(Assembler::equal, is_float);
 314   __ cmpl(c_rarg1, T_DOUBLE);
 315   __ jcc(Assembler::equal, is_double);
 316 #ifdef ASSERT
 317   // make sure the type is INT
 318   {
 319     Label L;
 320     __ cmpl(c_rarg1, T_INT);
 321     __ jcc(Assembler::equal, L);
 322     __ stop("StubRoutines::call_stub: unexpected result type");
 323     __ bind(L);
 324   }
 325 #endif
 326 
 327   // handle T_INT case
 328   __ movl(Address(c_rarg0, 0), rax);
 329 
 330   __ BIND(exit);
 331 
 332   // pop parameters
 333   __ lea(rsp, rsp_after_call);
 334 
 335 #ifdef ASSERT
 336   // verify that threads correspond
 337   {
 338    Label L1, L2, L3;
 339     __ cmpptr(r15_thread, thread);
 340     __ jcc(Assembler::equal, L1);
 341     __ stop("StubRoutines::call_stub: r15_thread is corrupted");
 342     __ bind(L1);
 343     __ get_thread(rbx);
 344     __ cmpptr(r15_thread, thread);
 345     __ jcc(Assembler::equal, L2);
 346     __ stop("StubRoutines::call_stub: r15_thread is modified by call");
 347     __ bind(L2);
 348     __ cmpptr(r15_thread, rbx);

 366   __ movptr(r13, r13_save);
 367   __ movptr(r12, r12_save);
 368   __ movptr(rbx, rbx_save);
 369 
 370 #ifdef _WIN64
 371   __ movptr(rdi, rdi_save);
 372   __ movptr(rsi, rsi_save);
 373 #else
 374   __ ldmxcsr(mxcsr_save);
 375 #endif
 376 
 377   // restore rsp
 378   __ addptr(rsp, -rsp_after_call_off * wordSize);
 379 
 380   // return
 381   __ vzeroupper();
 382   __ pop(rbp);
 383   __ ret(0);
 384 
 385   // handle return types different from T_INT
 386   __ BIND(is_long);
 387   __ movq(Address(c_rarg0, 0), rax);
 388   __ jmp(exit);
 389 
 390   __ BIND(is_float);
 391   __ movflt(Address(c_rarg0, 0), xmm0);
 392   __ jmp(exit);
 393 
 394   __ BIND(is_double);
 395   __ movdbl(Address(c_rarg0, 0), xmm0);
 396   __ jmp(exit);
 397 
 398   return start;
 399 }
 400 
 401 // Return point for a Java call if there's an exception thrown in
 402 // Java code.  The exception is caught and transformed into a
 403 // pending exception stored in JavaThread that can be tested from
 404 // within the VM.
 405 //
 406 // Note: Usually the parameters are removed by the callee. In case
 407 // of an exception crossing an activation frame boundary, that is
 408 // not the case if the callee is compiled code => need to set up the
 409 // rsp.
 410 //
 411 // rax: exception oop
 412 
 413 address StubGenerator::generate_catch_exception() {
 414   StubCodeMark mark(this, "StubRoutines", "catch_exception");
 415   address start = __ pc();

3851 // Initialization
3852 void StubGenerator::generate_initial_stubs() {
3853   // Generates all stubs and initializes the entry points
3854 
3855   // These platform-specific settings are needed by generate_call_stub()
3856   create_control_words();
3857 
3858   // Initialize table for unsafe copy memory check.
3859   if (UnsafeMemoryAccess::_table == nullptr) {
3860     UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3861   }
3862 
3863   // entry points that exist on all platforms. Note: This is code
3864   // that could be shared among different platforms - however the
3865   // benefit seems to be smaller than the disadvantage of having a
3866   // much more complicated generator structure. See also comment in
3867   // stubRoutines.hpp.
3868 
3869   StubRoutines::_forward_exception_entry = generate_forward_exception();
3870 
3871   StubRoutines::_call_stub_entry =
3872     generate_call_stub(StubRoutines::_call_stub_return_address);
3873 
3874   // is referenced by megamorphic call
3875   StubRoutines::_catch_exception_entry = generate_catch_exception();
3876 
3877   // atomic calls
3878   StubRoutines::_fence_entry                = generate_orderaccess_fence();
3879 
3880   // platform dependent
3881   StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3882 
3883   StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
3884 
3885   StubRoutines::x86::_f2i_fixup             = generate_f2i_fixup();
3886   StubRoutines::x86::_f2l_fixup             = generate_f2l_fixup();
3887   StubRoutines::x86::_d2i_fixup             = generate_d2i_fixup();
3888   StubRoutines::x86::_d2l_fixup             = generate_d2l_fixup();
3889 
3890   StubRoutines::x86::_float_sign_mask       = generate_fp_mask("float_sign_mask",  0x7FFFFFFF7FFFFFFF);

3903     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3904     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3905     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3906   }
3907 
3908   if (VM_Version::supports_float16()) {
3909     // For results consistency both intrinsics should be enabled.
3910     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3911     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3912         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3913       StubRoutines::_hf2f = generate_float16ToFloat();
3914       StubRoutines::_f2hf = generate_floatToFloat16();
3915     }
3916   }
3917 
3918   generate_libm_stubs();
3919 
3920   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3921 }
3922 
3923 void StubGenerator::generate_continuation_stubs() {
3924   // Continuation stubs:
3925   StubRoutines::_cont_thaw          = generate_cont_thaw();
3926   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3927   StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3928 }
3929 
3930 void StubGenerator::generate_final_stubs() {
3931   // Generates the rest of stubs and initializes the entry points
3932 
3933   // support for verify_oop (must happen after universe_init)
3934   if (VerifyOops) {
3935     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3936   }
3937 
3938   // data cache line writeback
3939   StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
3940   StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
3941 
3942   // arraycopy stubs used by compilers

src/hotspot/cpu/x86/stubGenerator_x86_64.cpp (new version)

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.hpp"
  27 #include "asm/macroAssembler.hpp"
  28 #include "classfile/vmIntrinsics.hpp"
  29 #include "compiler/oopMap.hpp"
  30 #include "gc/shared/barrierSet.hpp"
  31 #include "gc/shared/barrierSetAssembler.hpp"
  32 #include "gc/shared/barrierSetNMethod.hpp"
  33 #include "gc/shared/gc_globals.hpp"
  34 #include "memory/universe.hpp"
  35 #include "prims/jvmtiExport.hpp"
  36 #include "prims/upcallLinker.hpp"
  37 #include "runtime/arguments.hpp"
  38 #include "runtime/javaThread.hpp"
  39 #include "runtime/sharedRuntime.hpp"
  40 #include "runtime/stubRoutines.hpp"
  41 #include "utilities/macros.hpp"
  42 #include "vmreg_x86.inline.hpp"
  43 #include "stubGenerator_x86_64.hpp"
  44 #ifdef COMPILER2
  45 #include "opto/runtime.hpp"
  46 #include "opto/c2_globals.hpp"
  47 #endif
  48 #if INCLUDE_JVMCI
  49 #include "jvmci/jvmci_globals.hpp"
  50 #endif
  51 
  52 // For a more detailed description of the stub routine structure
  53 // see the comment in stubRoutines.hpp
  54 
  55 #define __ _masm->
  56 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
  57 
  58 #ifdef PRODUCT
  59 #define BLOCK_COMMENT(str) /* nothing */
  60 #else
  61 #define BLOCK_COMMENT(str) __ block_comment(str)
  62 #endif // PRODUCT

 288   __ BIND(loop);
 289   __ movptr(rax, Address(c_rarg2, 0));// get parameter
 290   __ addptr(c_rarg2, wordSize);       // advance to next parameter
 291   __ decrementl(c_rarg1);             // decrement counter
 292   __ push(rax);                       // pass parameter
 293   __ jcc(Assembler::notZero, loop);
 294 
 295   // call Java function
 296   __ BIND(parameters_done);
 297   __ movptr(rbx, method);             // get Method*
 298   __ movptr(c_rarg1, entry_point);    // get entry_point
 299   __ mov(r13, rsp);                   // set sender sp
 300   BLOCK_COMMENT("call Java function");
 301   __ call(c_rarg1);
 302 
 303   BLOCK_COMMENT("call_stub_return_address:");
 304   return_address = __ pc();
 305 
 306   // store result depending on type (everything that is not
 307   // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 308   __ movptr(r13, result);
 309   Label is_long, is_float, is_double, check_prim, exit;
 310   __ movl(rbx, result_type);
 311   __ cmpl(rbx, T_OBJECT);
 312   __ jcc(Assembler::equal, check_prim);
 313   __ cmpl(rbx, T_LONG);
 314   __ jcc(Assembler::equal, is_long);
 315   __ cmpl(rbx, T_FLOAT);
 316   __ jcc(Assembler::equal, is_float);
 317   __ cmpl(rbx, T_DOUBLE);
 318   __ jcc(Assembler::equal, is_double);
 319 #ifdef ASSERT
 320   // make sure the type is INT
 321   {
 322     Label L;
 323     __ cmpl(rbx, T_INT);
 324     __ jcc(Assembler::equal, L);
 325     __ stop("StubRoutines::call_stub: unexpected result type");
 326     __ bind(L);
 327   }
 328 #endif
 329 
 330   // handle T_INT case
 331   __ movl(Address(r13, 0), rax);
 332 
 333   __ BIND(exit);
 334 
 335   // pop parameters
 336   __ lea(rsp, rsp_after_call);
 337 
 338 #ifdef ASSERT
 339   // verify that threads correspond
 340   {
 341    Label L1, L2, L3;
 342     __ cmpptr(r15_thread, thread);
 343     __ jcc(Assembler::equal, L1);
 344     __ stop("StubRoutines::call_stub: r15_thread is corrupted");
 345     __ bind(L1);
 346     __ get_thread(rbx);
 347     __ cmpptr(r15_thread, thread);
 348     __ jcc(Assembler::equal, L2);
 349     __ stop("StubRoutines::call_stub: r15_thread is modified by call");
 350     __ bind(L2);
 351     __ cmpptr(r15_thread, rbx);

 369   __ movptr(r13, r13_save);
 370   __ movptr(r12, r12_save);
 371   __ movptr(rbx, rbx_save);
 372 
 373 #ifdef _WIN64
 374   __ movptr(rdi, rdi_save);
 375   __ movptr(rsi, rsi_save);
 376 #else
 377   __ ldmxcsr(mxcsr_save);
 378 #endif
 379 
 380   // restore rsp
 381   __ addptr(rsp, -rsp_after_call_off * wordSize);
 382 
 383   // return
 384   __ vzeroupper();
 385   __ pop(rbp);
 386   __ ret(0);
 387 
 388   // handle return types different from T_INT
 389   __ BIND(check_prim);
 390   if (InlineTypeReturnedAsFields) {
 391     // Check for scalarized return value
 392     __ testptr(rax, 1);
 393     __ jcc(Assembler::zero, is_long);
 394     // Load pack handler address
 395     __ andptr(rax, -2);
 396     __ movptr(rax, Address(rax, InstanceKlass::adr_inlineklass_fixed_block_offset()));
 397     __ movptr(rbx, Address(rax, InlineKlass::pack_handler_jobject_offset()));
 398     // Call pack handler to initialize the buffer
 399     __ call(rbx);
 400     __ jmp(exit);
 401   }
 402   __ BIND(is_long);
 403   __ movq(Address(r13, 0), rax);
 404   __ jmp(exit);
 405 
 406   __ BIND(is_float);
 407   __ movflt(Address(r13, 0), xmm0);
 408   __ jmp(exit);
 409 
 410   __ BIND(is_double);
 411   __ movdbl(Address(r13, 0), xmm0);
 412   __ jmp(exit);
 413 
 414   return start;
 415 }
 416 
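Reviewer note on the check_prim hunk above: with InlineTypeReturnedAsFields, the callee signals a scalarized inline-type return by setting the low bit of rax; the stub then masks that bit off and calls a pack handler reached through the InstanceKlass/InlineKlass offsets. A minimal C++ sketch of this dispatch convention, using hypothetical model types (InlineKlassModel, a single pack_handler pointer) rather than the real HotSpot classes, with the offset indirection collapsed:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-in for the real klass structures; only the tagging
    // convention is modelled here.
    struct InlineKlassModel {
      void (*pack_handler)();        // buffers the fields returned in registers
    };

    static void example_pack_handler() { std::puts("pack returned fields into a heap buffer"); }

    // Mirrors the check_prim logic: an even rax is a plain oop (stored like T_LONG),
    // an odd rax carries a tagged klass pointer for a scalarized return.
    void handle_object_return(uintptr_t rax) {
      if ((rax & 1) == 0) {
        std::puts("plain oop return, stored via the is_long path");
        return;
      }
      InlineKlassModel* k = reinterpret_cast<InlineKlassModel*>(rax & ~uintptr_t(1));
      k->pack_handler();             // corresponds to "__ call(rbx)" in the stub
    }

    int main() {
      InlineKlassModel k{example_pack_handler};
      handle_object_return(reinterpret_cast<uintptr_t>(&k) | 1);  // scalarized case
      handle_object_return(0);                                    // plain oop case
      return 0;
    }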
 417 // Return point for a Java call if there's an exception thrown in
 418 // Java code.  The exception is caught and transformed into a
 419 // pending exception stored in JavaThread that can be tested from
 420 // within the VM.
 421 //
 422 // Note: Usually the parameters are removed by the callee. In case
 423 // of an exception crossing an activation frame boundary, that is
 424 // not the case if the callee is compiled code => need to set up the
 425 // rsp.
 426 //
 427 // rax: exception oop
 428 
 429 address StubGenerator::generate_catch_exception() {
 430   StubCodeMark mark(this, "StubRoutines", "catch_exception");
 431   address start = __ pc();
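Reviewer note on the comment above: the catch_exception stub does not propagate the exception itself; it records it on the JavaThread so the VM can test for it after the call stub returns. A small standalone C++ model of that hand-off, using illustrative names (ThreadModel, pending_exception) rather than the real JavaThread layout:

    #include <cassert>

    // Hypothetical model of the hand-off; field names are illustrative only.
    struct ThreadModel {
      void* pending_exception = nullptr;
      bool  has_pending_exception() const { return pending_exception != nullptr; }
    };

    // Stands in for the catch_exception return point: record the exception oop
    // (rax in the real stub) on the thread and return normally to the VM caller.
    void catch_exception(ThreadModel* thread, void* exception_oop) {
      thread->pending_exception = exception_oop;
    }

    int main() {
      ThreadModel thread;
      int fake_oop = 0;
      catch_exception(&thread, &fake_oop);    // taken when Java code throws
      assert(thread.has_pending_exception()); // what the VM checks afterwards
      return 0;
    }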

3867 // Initialization
3868 void StubGenerator::generate_initial_stubs() {
3869   // Generates all stubs and initializes the entry points
3870 
3871   // These platform-specific settings are needed by generate_call_stub()
3872   create_control_words();
3873 
3874   // Initialize table for unsafe copy memory check.
3875   if (UnsafeMemoryAccess::_table == nullptr) {
3876     UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3877   }
3878 
3879   // entry points that exist on all platforms. Note: This is code
3880   // that could be shared among different platforms - however the
3881   // benefit seems to be smaller than the disadvantage of having a
3882   // much more complicated generator structure. See also comment in
3883   // stubRoutines.hpp.
3884 
3885   StubRoutines::_forward_exception_entry = generate_forward_exception();
3886 
3887   // Generate these first because they are called from other stubs
3888   if (InlineTypeReturnedAsFields) {
3889     StubRoutines::_load_inline_type_fields_in_regs =
3890       generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_inline_type_fields_in_regs),
3891                                  "load_inline_type_fields_in_regs", false);
3892     StubRoutines::_store_inline_type_fields_to_buf =
3893       generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_inline_type_fields_to_buf),
3894                                  "store_inline_type_fields_to_buf", true);
3895   }
3896 
3897   StubRoutines::_call_stub_entry =
3898     generate_call_stub(StubRoutines::_call_stub_return_address);
3899 
3900   // is referenced by megamorphic call
3901   StubRoutines::_catch_exception_entry = generate_catch_exception();
3902 
3903   // atomic calls
3904   StubRoutines::_fence_entry                = generate_orderaccess_fence();
3905 
3906   // platform dependent
3907   StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3908 
3909   StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
3910 
3911   StubRoutines::x86::_f2i_fixup             = generate_f2i_fixup();
3912   StubRoutines::x86::_f2l_fixup             = generate_f2l_fixup();
3913   StubRoutines::x86::_d2i_fixup             = generate_d2i_fixup();
3914   StubRoutines::x86::_d2l_fixup             = generate_d2l_fixup();
3915 
3916   StubRoutines::x86::_float_sign_mask       = generate_fp_mask("float_sign_mask",  0x7FFFFFFF7FFFFFFF);

3929     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3930     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3931     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3932   }
3933 
3934   if (VM_Version::supports_float16()) {
3935     // For results consistency both intrinsics should be enabled.
3936     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3937     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3938         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3939       StubRoutines::_hf2f = generate_float16ToFloat();
3940       StubRoutines::_f2hf = generate_floatToFloat16();
3941     }
3942   }
3943 
3944   generate_libm_stubs();
3945 
3946   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3947 }
3948 
3949 // Call here from the interpreter or compiled code to either load
3950 // multiple returned values from the inline type instance being
3951 // returned to registers or to store returned values to a newly
3952 // allocated inline type instance.
3953 // Register is a class, but it can be assigned a numerical value;
3954 // "0" is assigned for xmm0, so we need to ignore -Wnonnull.
3955 PRAGMA_DIAG_PUSH
3956 PRAGMA_NONNULL_IGNORED
3957 address StubGenerator::generate_return_value_stub(address destination, const char* name, bool has_res) {
3958   // We need to save all registers the calling convention may use so
3959   // the runtime calls read or update those registers. This needs to
3960   // be in sync with SharedRuntime::java_return_convention().
3961   enum layout {
3962     pad_off = frame::arg_reg_save_area_bytes/BytesPerInt, pad_off_2,
3963     rax_off, rax_off_2,
3964     j_rarg5_off, j_rarg5_2,
3965     j_rarg4_off, j_rarg4_2,
3966     j_rarg3_off, j_rarg3_2,
3967     j_rarg2_off, j_rarg2_2,
3968     j_rarg1_off, j_rarg1_2,
3969     j_rarg0_off, j_rarg0_2,
3970     j_farg0_off, j_farg0_2,
3971     j_farg1_off, j_farg1_2,
3972     j_farg2_off, j_farg2_2,
3973     j_farg3_off, j_farg3_2,
3974     j_farg4_off, j_farg4_2,
3975     j_farg5_off, j_farg5_2,
3976     j_farg6_off, j_farg6_2,
3977     j_farg7_off, j_farg7_2,
3978     rbp_off, rbp_off_2,
3979     return_off, return_off_2,
3980 
3981     framesize
3982   };
3983 
3984   CodeBuffer buffer(name, 1000, 512);
3985   MacroAssembler* _masm = new MacroAssembler(&buffer);
3986 
3987   int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
3988   assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
3989   int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
3990   int frame_size_in_words = frame_size_in_bytes / wordSize;
3991 
3992   OopMapSet *oop_maps = new OopMapSet();
3993   OopMap* map = new OopMap(frame_size_in_slots, 0);
3994 
3995   map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
3996   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
3997   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
3998   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
3999   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
4000   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
4001   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
4002   map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
4003   map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
4004   map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
4005   map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
4006   map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
4007   map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
4008   map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
4009   map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
4010 
4011   int start = __ offset();
4012 
4013   __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
4014 
4015   __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
4016   __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
4017   __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
4018   __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
4019   __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
4020   __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
4021   __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
4022   __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
4023   __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
4024 
4025   __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
4026   __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
4027   __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
4028   __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
4029   __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
4030   __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
4031   __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
4032 
4033   int frame_complete = __ offset();
4034 
4035   __ set_last_Java_frame(noreg, noreg, nullptr, rscratch1);
4036 
4037   __ mov(c_rarg0, r15_thread);
4038   __ mov(c_rarg1, rax);
4039 
4040   __ call(RuntimeAddress(destination));
4041 
4042   // Set an oopmap for the call site.
4043 
4044   oop_maps->add_gc_map( __ offset() - start, map);
4045 
4046   // clear last_Java_sp
4047   __ reset_last_Java_frame(false);
4048 
4049   __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
4050   __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
4051   __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
4052   __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
4053   __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
4054   __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
4055   __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
4056   __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
4057   __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
4058 
4059   __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
4060   __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
4061   __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
4062   __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
4063   __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
4064   __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
4065   __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
4066 
4067   __ addptr(rsp, frame_size_in_bytes-8);
4068 
4069   // check for pending exceptions
4070   Label pending;
4071   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
4072   __ jcc(Assembler::notEqual, pending);
4073 
4074   if (has_res) {
4075     __ get_vm_result(rax, r15_thread);
4076   }
4077 
4078   __ ret(0);
4079 
4080   __ bind(pending);
4081 
4082   __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
4083   __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
4084 
4085   // -------------
4086   // make sure all code is generated
4087   _masm->flush();
4088 
4089   RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
4090   return stub->entry_point();
4091 }
4092 
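Reviewer note on the enum layout in generate_return_value_stub above: each saved 64-bit register occupies two 32-bit slots, preceded by the argument register save area. The sketch below recomputes framesize and frame_size_in_bytes to show why the "misaligned" assert holds. The value of frame::arg_reg_save_area_bytes is an assumption here (0 for the System V case; it would be 32 on Windows x64, which also keeps the total 16-byte aligned):

    #include <cassert>
    #include <cstdio>

    // Assumed constants (System V x86_64); the real values come from the
    // HotSpot frame_x86 and globalDefinitions headers.
    const int BytesPerInt = 4;
    const int arg_reg_save_area_bytes = 0;   // assumed; 32 on Windows x64

    // Mirrors the enum layout in the stub: two 4-byte slots per saved register.
    enum layout {
      pad_off = arg_reg_save_area_bytes / BytesPerInt, pad_off_2,
      rax_off, rax_off_2,
      j_rarg5_off, j_rarg5_2, j_rarg4_off, j_rarg4_2, j_rarg3_off, j_rarg3_2,
      j_rarg2_off, j_rarg2_2, j_rarg1_off, j_rarg1_2, j_rarg0_off, j_rarg0_2,
      j_farg0_off, j_farg0_2, j_farg1_off, j_farg1_2, j_farg2_off, j_farg2_2,
      j_farg3_off, j_farg3_2, j_farg4_off, j_farg4_2, j_farg5_off, j_farg5_2,
      j_farg6_off, j_farg6_2, j_farg7_off, j_farg7_2,
      rbp_off, rbp_off_2,
      return_off, return_off_2,
      framesize
    };

    static int align_up(int x, int a) { return (x + a - 1) & ~(a - 1); }

    int main() {
      int frame_size_in_bytes = align_up(framesize * BytesPerInt, 16);
      // With either 0 or 32 bytes of argument save area the raw total is already
      // a multiple of 16, which is exactly what the assert in the stub checks.
      assert(frame_size_in_bytes == framesize * BytesPerInt);
      std::printf("framesize = %d slots, %d bytes\n", (int)framesize, frame_size_in_bytes);
      return 0;
    }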
4093 void StubGenerator::generate_continuation_stubs() {
4094   // Continuation stubs:
4095   StubRoutines::_cont_thaw          = generate_cont_thaw();
4096   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
4097   StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
4098 }
4099 
4100 void StubGenerator::generate_final_stubs() {
4101   // Generates the rest of stubs and initializes the entry points
4102 
4103   // support for verify_oop (must happen after universe_init)
4104   if (VerifyOops) {
4105     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
4106   }
4107 
4108   // data cache line writeback
4109   StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
4110   StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
4111 
4112   // arraycopy stubs used by compilers