src/hotspot/cpu/x86/stubGenerator_x86_64.cpp

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/javaClasses.hpp"
  28 #include "classfile/vmIntrinsics.hpp"
  29 #include "compiler/oopMap.hpp"
  30 #include "gc/shared/barrierSet.hpp"
  31 #include "gc/shared/barrierSetAssembler.hpp"
  32 #include "gc/shared/barrierSetNMethod.hpp"
  33 #include "gc/shared/gc_globals.hpp"
  34 #include "memory/universe.hpp"
  35 #include "prims/jvmtiExport.hpp"
  36 #include "prims/upcallLinker.hpp"
  37 #include "runtime/arguments.hpp"
  38 #include "runtime/javaThread.hpp"
  39 #include "runtime/sharedRuntime.hpp"
  40 #include "runtime/stubRoutines.hpp"
  41 #include "stubGenerator_x86_64.hpp"
  42 #ifdef COMPILER2
  43 #include "opto/runtime.hpp"
  44 #include "opto/c2_globals.hpp"
  45 #endif
  46 #if INCLUDE_JVMCI
  47 #include "jvmci/jvmci_globals.hpp"
  48 #endif
  49 
  50 // For a more detailed description of the stub routine structure
  51 // see the comment in stubRoutines.hpp
  52 
  53 #define __ _masm->
  54 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
  55 
  56 #ifdef PRODUCT
  57 #define BLOCK_COMMENT(str) /* nothing */
  58 #else
  59 #define BLOCK_COMMENT(str) __ block_comment(str)
  60 #endif // PRODUCT
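
TIMES_OOP above selects the address scale used when indexing oop arrays: 4-byte strides when compressed oops are enabled, 8-byte strides otherwise. As a minimal illustration of that choice (plain C++, not the Address/Assembler API):

  #include <cstddef>

  // Element stride for an oop array, mirroring the TIMES_OOP macro above:
  // Address::times_4 under compressed oops, Address::times_8 otherwise.
  inline size_t oop_stride(bool use_compressed_oops) {
    return use_compressed_oops ? 4 : 8;
  }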

 286   __ BIND(loop);
 287   __ movptr(rax, Address(c_rarg2, 0));// get parameter
 288   __ addptr(c_rarg2, wordSize);       // advance to next parameter
 289   __ decrementl(c_rarg1);             // decrement counter
 290   __ push(rax);                       // pass parameter
 291   __ jcc(Assembler::notZero, loop);
 292 
 293   // call Java function
 294   __ BIND(parameters_done);
 295   __ movptr(rbx, method);             // get Method*
 296   __ movptr(c_rarg1, entry_point);    // get entry_point
 297   __ mov(r13, rsp);                   // set sender sp
 298   BLOCK_COMMENT("call Java function");
 299   __ call(c_rarg1);
 300 
 301   BLOCK_COMMENT("call_stub_return_address:");
 302   return_address = __ pc();
 303 
 304   // store result depending on type (everything that is not
 305   // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 306   __ movptr(c_rarg0, result);
 307   Label is_long, is_float, is_double, exit;
 308   __ movl(c_rarg1, result_type);
 309   __ cmpl(c_rarg1, T_OBJECT);
 310   __ jcc(Assembler::equal, is_long);
 311   __ cmpl(c_rarg1, T_LONG);
 312   __ jcc(Assembler::equal, is_long);
 313   __ cmpl(c_rarg1, T_FLOAT);
 314   __ jcc(Assembler::equal, is_float);
 315   __ cmpl(c_rarg1, T_DOUBLE);
 316   __ jcc(Assembler::equal, is_double);
 317 #ifdef ASSERT
 318   // make sure the type is INT
 319   {
 320     Label L;
 321     __ cmpl(c_rarg1, T_INT);
 322     __ jcc(Assembler::equal, L);
 323     __ stop("StubRoutines::call_stub: unexpected result type");
 324     __ bind(L);
 325   }
 326 #endif
 327 
 328   // handle T_INT case
 329   __ movl(Address(c_rarg0, 0), rax);
 330 
 331   __ BIND(exit);
 332 
 333   // pop parameters
 334   __ lea(rsp, rsp_after_call);
 335 
 336 #ifdef ASSERT
 337   // verify that threads correspond
 338   {
 339     Label L1, L2, L3;
 340     __ cmpptr(r15_thread, thread);
 341     __ jcc(Assembler::equal, L1);
 342     __ stop("StubRoutines::call_stub: r15_thread is corrupted");
 343     __ bind(L1);
 344     __ get_thread(rbx);
 345     __ cmpptr(r15_thread, thread);
 346     __ jcc(Assembler::equal, L2);
 347     __ stop("StubRoutines::call_stub: r15_thread is modified by call");
 348     __ bind(L2);
 349     __ cmpptr(r15_thread, rbx);

 367   __ movptr(r13, r13_save);
 368   __ movptr(r12, r12_save);
 369   __ movptr(rbx, rbx_save);
 370 
 371 #ifdef _WIN64
 372   __ movptr(rdi, rdi_save);
 373   __ movptr(rsi, rsi_save);
 374 #else
 375   __ ldmxcsr(mxcsr_save);
 376 #endif
 377 
 378   // restore rsp
 379   __ addptr(rsp, -rsp_after_call_off * wordSize);
 380 
 381   // return
 382   __ vzeroupper();
 383   __ pop(rbp);
 384   __ ret(0);
 385 
 386   // handle return types different from T_INT
 387   __ BIND(is_long);
 388   __ movq(Address(c_rarg0, 0), rax);
 389   __ jmp(exit);
 390 
 391   __ BIND(is_float);
 392   __ movflt(Address(c_rarg0, 0), xmm0);
 393   __ jmp(exit);
 394 
 395   __ BIND(is_double);
 396   __ movdbl(Address(c_rarg0, 0), xmm0);
 397   __ jmp(exit);
 398 
 399   return start;
 400 }
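
For orientation, the result-handling sequence above amounts to the following dispatch, sketched here as standalone C++ (the enum, function, and parameter names are illustrative stand-ins, not the stub's actual interface; the float case is modeled as a narrowing of the double parameter):

  #include <cstdint>
  #include <cstring>

  enum ResultKind { OBJ, LNG, FLT, DBL, OTHER /* treated as T_INT */ };

  // 'rax' carries integer/pointer results, 'xmm0' carries floating-point results.
  void store_call_result(ResultKind kind, void* result, uint64_t rax, double xmm0) {
    switch (kind) {
      case OBJ:
      case LNG:   std::memcpy(result, &rax, 8); break;        // movq  (64-bit store)
      case FLT: { float f = (float)xmm0;                       // movflt (low 32 bits of xmm0)
                  std::memcpy(result, &f, 4); break; }
      case DBL:   std::memcpy(result, &xmm0, 8); break;        // movdbl
      default:  { int32_t i = (int32_t)rax;                    // movl: everything else is T_INT
                  std::memcpy(result, &i, 4); break; }
    }
  }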
 401 
 402 // Return point for a Java call if there's an exception thrown in
 403 // Java code.  The exception is caught and transformed into a
 404 // pending exception stored in JavaThread that can be tested from
 405 // within the VM.
 406 //
 407 // Note: Usually the parameters are removed by the callee. In case
 408 // of an exception crossing an activation frame boundary, that is
 409 // not the case if the callee is compiled code => need to set up the
 410 // rsp.
 411 //
 412 // rax: exception oop
 413 
 414 address StubGenerator::generate_catch_exception() {
 415   StubCodeMark mark(this, "StubRoutines", "catch_exception");
 416   address start = __ pc();

3879 // Initialization
3880 void StubGenerator::generate_initial_stubs() {
3881   // Generates all stubs and initializes the entry points
3882 
3883   // These platform-specific settings are needed by generate_call_stub()
3884   create_control_words();
3885 
3886   // Initialize table for unsafe copy memory check.
3887   if (UnsafeMemoryAccess::_table == nullptr) {
3888     UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3889   }
3890 
3891   // entry points that exist in all platforms. Note: This is code
3892   // that could be shared among different platforms - however the
3893   // benefit seems to be smaller than the disadvantage of having a
3894   // much more complicated generator structure. See also comment in
3895   // stubRoutines.hpp.
3896 
3897   StubRoutines::_forward_exception_entry = generate_forward_exception();
3898 
3899   StubRoutines::_call_stub_entry =
3900     generate_call_stub(StubRoutines::_call_stub_return_address);
3901 
3902   // is referenced by megamorphic call
3903   StubRoutines::_catch_exception_entry = generate_catch_exception();
3904 
3905   // atomic calls
3906   StubRoutines::_fence_entry                = generate_orderaccess_fence();
3907 
3908   // platform dependent
3909   StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3910 
3911   StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
3912 
3913   StubRoutines::x86::_f2i_fixup             = generate_f2i_fixup();
3914   StubRoutines::x86::_f2l_fixup             = generate_f2l_fixup();
3915   StubRoutines::x86::_d2i_fixup             = generate_d2i_fixup();
3916   StubRoutines::x86::_d2l_fixup             = generate_d2l_fixup();
3917 
3918   StubRoutines::x86::_float_sign_mask       = generate_fp_mask("float_sign_mask",  0x7FFFFFFF7FFFFFFF);

3931     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3932     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3933     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3934   }
3935 
3936   if (VM_Version::supports_float16()) {
3937     // For results consistency both intrinsics should be enabled.
3938     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3939     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3940         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3941       StubRoutines::_hf2f = generate_float16ToFloat();
3942       StubRoutines::_f2hf = generate_floatToFloat16();
3943     }
3944   }
3945 
3946   generate_libm_stubs();
3947 
3948   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3949 }
3950 
3951 void StubGenerator::generate_continuation_stubs() {
3952   // Continuation stubs:
3953   StubRoutines::_cont_thaw          = generate_cont_thaw();
3954   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3955   StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3956 }
3957 
3958 void StubGenerator::generate_final_stubs() {
3959   // Generates the rest of stubs and initializes the entry points
3960 
3961   // support for verify_oop (must happen after universe_init)
3962   if (VerifyOops) {
3963     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3964   }
3965 
3966   // data cache line writeback
3967   StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
3968   StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
3969 
3970   // arraycopy stubs used by compilers

The listing below is the updated version of the same file, with the inline-type return-value changes applied; the listing above shows the file before the change.

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.hpp"
  27 #include "asm/macroAssembler.hpp"
  28 #include "classfile/javaClasses.hpp"
  29 #include "classfile/vmIntrinsics.hpp"
  30 #include "compiler/oopMap.hpp"
  31 #include "gc/shared/barrierSet.hpp"
  32 #include "gc/shared/barrierSetAssembler.hpp"
  33 #include "gc/shared/barrierSetNMethod.hpp"
  34 #include "gc/shared/gc_globals.hpp"
  35 #include "memory/universe.hpp"
  36 #include "prims/jvmtiExport.hpp"
  37 #include "prims/upcallLinker.hpp"
  38 #include "runtime/arguments.hpp"
  39 #include "runtime/javaThread.hpp"
  40 #include "runtime/sharedRuntime.hpp"
  41 #include "runtime/stubRoutines.hpp"
  42 #include "utilities/macros.hpp"
  43 #include "vmreg_x86.inline.hpp"
  44 #include "stubGenerator_x86_64.hpp"
  45 #ifdef COMPILER2
  46 #include "opto/runtime.hpp"
  47 #include "opto/c2_globals.hpp"
  48 #endif
  49 #if INCLUDE_JVMCI
  50 #include "jvmci/jvmci_globals.hpp"
  51 #endif
  52 
  53 // For a more detailed description of the stub routine structure
  54 // see the comment in stubRoutines.hpp
  55 
  56 #define __ _masm->
  57 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
  58 
  59 #ifdef PRODUCT
  60 #define BLOCK_COMMENT(str) /* nothing */
  61 #else
  62 #define BLOCK_COMMENT(str) __ block_comment(str)
  63 #endif // PRODUCT

 289   __ BIND(loop);
 290   __ movptr(rax, Address(c_rarg2, 0));// get parameter
 291   __ addptr(c_rarg2, wordSize);       // advance to next parameter
 292   __ decrementl(c_rarg1);             // decrement counter
 293   __ push(rax);                       // pass parameter
 294   __ jcc(Assembler::notZero, loop);
 295 
 296   // call Java function
 297   __ BIND(parameters_done);
 298   __ movptr(rbx, method);             // get Method*
 299   __ movptr(c_rarg1, entry_point);    // get entry_point
 300   __ mov(r13, rsp);                   // set sender sp
 301   BLOCK_COMMENT("call Java function");
 302   __ call(c_rarg1);
 303 
 304   BLOCK_COMMENT("call_stub_return_address:");
 305   return_address = __ pc();
 306 
 307   // store result depending on type (everything that is not
 308   // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 309   __ movptr(r13, result);
 310   Label is_long, is_float, is_double, check_prim, exit;
 311   __ movl(rbx, result_type);
 312   __ cmpl(rbx, T_OBJECT);
 313   __ jcc(Assembler::equal, check_prim);
 314   __ cmpl(rbx, T_LONG);
 315   __ jcc(Assembler::equal, is_long);
 316   __ cmpl(rbx, T_FLOAT);
 317   __ jcc(Assembler::equal, is_float);
 318   __ cmpl(rbx, T_DOUBLE);
 319   __ jcc(Assembler::equal, is_double);
 320 #ifdef ASSERT
 321   // make sure the type is INT
 322   {
 323     Label L;
 324     __ cmpl(rbx, T_INT);
 325     __ jcc(Assembler::equal, L);
 326     __ stop("StubRoutines::call_stub: unexpected result type");
 327     __ bind(L);
 328   }
 329 #endif
 330 
 331   // handle T_INT case
 332   __ movl(Address(r13, 0), rax);
 333 
 334   __ BIND(exit);
 335 
 336   // pop parameters
 337   __ lea(rsp, rsp_after_call);
 338 
 339 #ifdef ASSERT
 340   // verify that threads correspond
 341   {
 342     Label L1, L2, L3;
 343     __ cmpptr(r15_thread, thread);
 344     __ jcc(Assembler::equal, L1);
 345     __ stop("StubRoutines::call_stub: r15_thread is corrupted");
 346     __ bind(L1);
 347     __ get_thread(rbx);
 348     __ cmpptr(r15_thread, thread);
 349     __ jcc(Assembler::equal, L2);
 350     __ stop("StubRoutines::call_stub: r15_thread is modified by call");
 351     __ bind(L2);
 352     __ cmpptr(r15_thread, rbx);

 370   __ movptr(r13, r13_save);
 371   __ movptr(r12, r12_save);
 372   __ movptr(rbx, rbx_save);
 373 
 374 #ifdef _WIN64
 375   __ movptr(rdi, rdi_save);
 376   __ movptr(rsi, rsi_save);
 377 #else
 378   __ ldmxcsr(mxcsr_save);
 379 #endif
 380 
 381   // restore rsp
 382   __ addptr(rsp, -rsp_after_call_off * wordSize);
 383 
 384   // return
 385   __ vzeroupper();
 386   __ pop(rbp);
 387   __ ret(0);
 388 
 389   // handle return types different from T_INT
 390   __ BIND(check_prim);
 391   if (InlineTypeReturnedAsFields) {
 392     // Check for scalarized return value
 393     __ testptr(rax, 1);
 394     __ jcc(Assembler::zero, is_long);
 395     // Load pack handler address
 396     __ andptr(rax, -2);
 397     __ movptr(rax, Address(rax, InstanceKlass::adr_inlineklass_fixed_block_offset()));
 398     __ movptr(rbx, Address(rax, InlineKlass::pack_handler_jobject_offset()));
 399     // Call pack handler to initialize the buffer
 400     __ call(rbx);
 401     __ jmp(exit);
 402   }
 403   __ BIND(is_long);
 404   __ movq(Address(r13, 0), rax);
 405   __ jmp(exit);
 406 
 407   __ BIND(is_float);
 408   __ movflt(Address(r13, 0), xmm0);
 409   __ jmp(exit);
 410 
 411   __ BIND(is_double);
 412   __ movdbl(Address(r13, 0), xmm0);
 413   __ jmp(exit);
 414 
 415   return start;
 416 }
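
The new check_prim path relies on a tag bit: when InlineTypeReturnedAsFields is enabled and the callee returns a scalarized inline type, rax carries the InlineKlass pointer with its low bit set rather than an ordinary oop. A minimal sketch of that convention, assuming only what testptr(rax, 1) and andptr(rax, -2) above imply (the helper names are placeholders, not HotSpot APIs):

  #include <cstdint>

  // Low bit set: rax holds a tagged InlineKlass*, and the field values are in registers.
  inline bool is_scalarized_return(uintptr_t rax) {
    return (rax & 1) != 0;                      // testptr(rax, 1)
  }

  // Clearing the tag recovers the InlineKlass*, whose pack handler buffers the fields.
  inline uintptr_t untag_inline_klass(uintptr_t rax) {
    return rax & ~static_cast<uintptr_t>(1);    // andptr(rax, -2)
  }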
 417 
 418 // Return point for a Java call if there's an exception thrown in
 419 // Java code.  The exception is caught and transformed into a
 420 // pending exception stored in JavaThread that can be tested from
 421 // within the VM.
 422 //
 423 // Note: Usually the parameters are removed by the callee. In case
 424 // of an exception crossing an activation frame boundary, that is
 425 // not the case if the callee is compiled code => need to set up the
 426 // rsp.
 427 //
 428 // rax: exception oop
 429 
 430 address StubGenerator::generate_catch_exception() {
 431   StubCodeMark mark(this, "StubRoutines", "catch_exception");
 432   address start = __ pc();

3895 // Initialization
3896 void StubGenerator::generate_initial_stubs() {
3897   // Generates all stubs and initializes the entry points
3898 
3899   // These platform-specific settings are needed by generate_call_stub()
3900   create_control_words();
3901 
3902   // Initialize table for unsafe copy memory check.
3903   if (UnsafeMemoryAccess::_table == nullptr) {
3904     UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3905   }
3906 
3907   // entry points that exist in all platforms. Note: This is code
3908   // that could be shared among different platforms - however the
3909   // benefit seems to be smaller than the disadvantage of having a
3910   // much more complicated generator structure. See also comment in
3911   // stubRoutines.hpp.
3912 
3913   StubRoutines::_forward_exception_entry = generate_forward_exception();
3914 
3915   // Generate these first because they are called from other stubs
3916   if (InlineTypeReturnedAsFields) {
3917     StubRoutines::_load_inline_type_fields_in_regs =
3918       generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_inline_type_fields_in_regs),
3919                                  "load_inline_type_fields_in_regs", false);
3920     StubRoutines::_store_inline_type_fields_to_buf =
3921       generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_inline_type_fields_to_buf),
3922                                  "store_inline_type_fields_to_buf", true);
3923   }
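  // Note: generate_return_value_stub(), defined below, jumps to
  // StubRoutines::forward_exception_entry() when a pending exception is found,
  // so _forward_exception_entry must already be generated at this point.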
3924 
3925   StubRoutines::_call_stub_entry =
3926     generate_call_stub(StubRoutines::_call_stub_return_address);
3927 
3928   // is referenced by megamorphic call
3929   StubRoutines::_catch_exception_entry = generate_catch_exception();
3930 
3931   // atomic calls
3932   StubRoutines::_fence_entry                = generate_orderaccess_fence();
3933 
3934   // platform dependent
3935   StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3936 
3937   StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
3938 
3939   StubRoutines::x86::_f2i_fixup             = generate_f2i_fixup();
3940   StubRoutines::x86::_f2l_fixup             = generate_f2l_fixup();
3941   StubRoutines::x86::_d2i_fixup             = generate_d2i_fixup();
3942   StubRoutines::x86::_d2l_fixup             = generate_d2l_fixup();
3943 
3944   StubRoutines::x86::_float_sign_mask       = generate_fp_mask("float_sign_mask",  0x7FFFFFFF7FFFFFFF);

3957     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3958     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3959     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3960   }
3961 
3962   if (VM_Version::supports_float16()) {
3963     // For results consistency both intrinsics should be enabled.
3964     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3965     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3966         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3967       StubRoutines::_hf2f = generate_float16ToFloat();
3968       StubRoutines::_f2hf = generate_floatToFloat16();
3969     }
3970   }
3971 
3972   generate_libm_stubs();
3973 
3974   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3975 }
3976 
3977 // Call here from the interpreter or compiled code either to load the
3978 // multiple returned values from the inline type instance being
3979 // returned into registers, or to store the returned values into a
3980 // newly allocated inline type instance.
3981 // Register is a class, but it can be assigned a numerical value;
3982 // "0" is assigned for xmm0, so we need to ignore -Wnonnull.
3983 PRAGMA_DIAG_PUSH
3984 PRAGMA_NONNULL_IGNORED
3985 address StubGenerator::generate_return_value_stub(address destination, const char* name, bool has_res) {
3986   // We need to save all registers the calling convention may use so
3987   // the runtime calls read or update those registers. This needs to
3988   // be in sync with SharedRuntime::java_return_convention().
3989   enum layout {
3990     pad_off = frame::arg_reg_save_area_bytes/BytesPerInt, pad_off_2,
3991     rax_off, rax_off_2,
3992     j_rarg5_off, j_rarg5_2,
3993     j_rarg4_off, j_rarg4_2,
3994     j_rarg3_off, j_rarg3_2,
3995     j_rarg2_off, j_rarg2_2,
3996     j_rarg1_off, j_rarg1_2,
3997     j_rarg0_off, j_rarg0_2,
3998     j_farg0_off, j_farg0_2,
3999     j_farg1_off, j_farg1_2,
4000     j_farg2_off, j_farg2_2,
4001     j_farg3_off, j_farg3_2,
4002     j_farg4_off, j_farg4_2,
4003     j_farg5_off, j_farg5_2,
4004     j_farg6_off, j_farg6_2,
4005     j_farg7_off, j_farg7_2,
4006     rbp_off, rbp_off_2,
4007     return_off, return_off_2,
4008 
4009     framesize
4010   };
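  // Each *_off / *_2 pair above is two BytesPerInt (4-byte) slots, i.e. one
  // 8-byte save slot per register, so 'framesize' counts 32-bit stack slots.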
4011 
4012   CodeBuffer buffer(name, 1000, 512);
4013   MacroAssembler* _masm = new MacroAssembler(&buffer);
4014 
4015   int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
4016   assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
4017   int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
4018   int frame_size_in_words = frame_size_in_bytes / wordSize;
4019 
4020   OopMapSet *oop_maps = new OopMapSet();
4021   OopMap* map = new OopMap(frame_size_in_slots, 0);
4022 
4023   map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
4024   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
4025   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
4026   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
4027   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
4028   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
4029   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
4030   map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
4031   map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
4032   map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
4033   map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
4034   map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
4035   map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
4036   map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
4037   map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
4038 
4039   int start = __ offset();
4040 
4041   __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
4042 
4043   __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
4044   __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
4045   __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
4046   __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
4047   __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
4048   __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
4049   __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
4050   __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
4051   __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
4052 
4053   __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
4054   __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
4055   __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
4056   __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
4057   __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
4058   __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
4059   __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
4060 
4061   int frame_complete = __ offset();
4062 
4063   __ set_last_Java_frame(noreg, noreg, nullptr, rscratch1);
4064 
4065   __ mov(c_rarg0, r15_thread);
4066   __ mov(c_rarg1, rax);
4067 
4068   __ call(RuntimeAddress(destination));
4069 
4070   // Set an oopmap for the call site.
4071 
4072   oop_maps->add_gc_map( __ offset() - start, map);
4073 
4074   // clear last_Java_sp
4075   __ reset_last_Java_frame(false);
4076 
4077   __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
4078   __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
4079   __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
4080   __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
4081   __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
4082   __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
4083   __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
4084   __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
4085   __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
4086 
4087   __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
4088   __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
4089   __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
4090   __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
4091   __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
4092   __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
4093   __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
4094 
4095   __ addptr(rsp, frame_size_in_bytes-8);
4096 
4097   // check for pending exceptions
4098   Label pending;
4099   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
4100   __ jcc(Assembler::notEqual, pending);
4101 
4102   if (has_res) {
4103     __ get_vm_result(rax, r15_thread);
4104   }
4105 
4106   __ ret(0);
4107 
4108   __ bind(pending);
4109 
4110   __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
4111   __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
4112 
4113   // -------------
4114   // make sure all code is generated
4115   _masm->flush();
4116 
4117   RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
4118   return stub->entry_point();
4119 }
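
For orientation, the epilogue of this stub follows a common runtime-call pattern: check for a pending exception, forward it if present, otherwise fetch the VM result when the call produces one. A minimal standalone C++ sketch of that logic (ThreadLike and the function below are illustrative stand-ins, not HotSpot declarations):

  #include <cstddef>

  struct ThreadLike {          // stand-in for the two JavaThread fields the stub reads
    void* pending_exception;
    void* vm_result;
  };

  // Models what ends up in rax after the stub returns; a null return here stands in
  // for the tail jump to StubRoutines::forward_exception_entry().
  void* return_value_stub_epilogue(ThreadLike* thread, bool has_res, void* rax) {
    if (thread->pending_exception != nullptr) {
      return nullptr;                          // forward the pending exception
    }
    return has_res ? thread->vm_result : rax;  // get_vm_result(rax, r15_thread) when has_res
  }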
4120 
4121 void StubGenerator::generate_continuation_stubs() {
4122   // Continuation stubs:
4123   StubRoutines::_cont_thaw          = generate_cont_thaw();
4124   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
4125   StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
4126 }
4127 
4128 void StubGenerator::generate_final_stubs() {
4129   // Generates the rest of stubs and initializes the entry points
4130 
4131   // support for verify_oop (must happen after universe_init)
4132   if (VerifyOops) {
4133     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
4134   }
4135 
4136   // data cache line writeback
4137   StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
4138   StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
4139 
4140   // arraycopy stubs used by compilers