src/hotspot/cpu/x86/stubGenerator_x86_64.cpp

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"

  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/javaClasses.hpp"
  28 #include "classfile/vmIntrinsics.hpp"
  29 #include "compiler/oopMap.hpp"
  30 #include "gc/shared/barrierSet.hpp"
  31 #include "gc/shared/barrierSetAssembler.hpp"
  32 #include "gc/shared/barrierSetNMethod.hpp"
  33 #include "gc/shared/gc_globals.hpp"
  34 #include "memory/universe.hpp"
  35 #include "prims/jvmtiExport.hpp"
  36 #include "prims/upcallLinker.hpp"
  37 #include "runtime/arguments.hpp"
  38 #include "runtime/continuationEntry.hpp"
  39 #include "runtime/javaThread.hpp"
  40 #include "runtime/sharedRuntime.hpp"
  41 #include "runtime/stubRoutines.hpp"


  42 #include "stubGenerator_x86_64.hpp"
  43 #ifdef COMPILER2
  44 #include "opto/runtime.hpp"
  45 #include "opto/c2_globals.hpp"
  46 #endif
  47 #if INCLUDE_JVMCI
  48 #include "jvmci/jvmci_globals.hpp"
  49 #endif
  50 
  51 // For a more detailed description of the stub routine structure
  52 // see the comment in stubRoutines.hpp
  53 
  54 #define __ _masm->
  55 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
  56 
  57 #ifdef PRODUCT
  58 #define BLOCK_COMMENT(str) /* nothing */
  59 #else
  60 #define BLOCK_COMMENT(str) __ block_comment(str)
  61 #endif // PRODUCT

 287   __ BIND(loop);
 288   __ movptr(rax, Address(c_rarg2, 0));// get parameter
 289   __ addptr(c_rarg2, wordSize);       // advance to next parameter
 290   __ decrementl(c_rarg1);             // decrement counter
 291   __ push(rax);                       // pass parameter
 292   __ jcc(Assembler::notZero, loop);
 293 
 294   // call Java function
 295   __ BIND(parameters_done);
 296   __ movptr(rbx, method);             // get Method*
 297   __ movptr(c_rarg1, entry_point);    // get entry_point
 298   __ mov(r13, rsp);                   // set sender sp
 299   BLOCK_COMMENT("call Java function");
 300   __ call(c_rarg1);
 301 
 302   BLOCK_COMMENT("call_stub_return_address:");
 303   return_address = __ pc();
 304 
 305   // store result depending on type (everything that is not
 306   // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 307   __ movptr(c_rarg0, result);
 308   Label is_long, is_float, is_double, exit;
 309   __ movl(c_rarg1, result_type);
 310   __ cmpl(c_rarg1, T_OBJECT);


 311   __ jcc(Assembler::equal, is_long);
 312   __ cmpl(c_rarg1, T_LONG);
 313   __ jcc(Assembler::equal, is_long);
 314   __ cmpl(c_rarg1, T_FLOAT);
 315   __ jcc(Assembler::equal, is_float);
 316   __ cmpl(c_rarg1, T_DOUBLE);
 317   __ jcc(Assembler::equal, is_double);
 318 #ifdef ASSERT
 319   // make sure the type is INT
 320   {
 321     Label L;
 322     __ cmpl(c_rarg1, T_INT);
 323     __ jcc(Assembler::equal, L);
 324     __ stop("StubRoutines::call_stub: unexpected result type");
 325     __ bind(L);
 326   }
 327 #endif
 328 
 329   // handle T_INT case
 330   __ movl(Address(c_rarg0, 0), rax);
 331 
 332   __ BIND(exit);
 333 
 334   // pop parameters
 335   __ lea(rsp, rsp_after_call);
 336 
 337 #ifdef ASSERT
 338   // verify that threads correspond
 339   {
 340     Label L1, L2, L3;
 341     __ cmpptr(r15_thread, thread);
 342     __ jcc(Assembler::equal, L1);
 343     __ stop("StubRoutines::call_stub: r15_thread is corrupted");
 344     __ bind(L1);
 345     __ get_thread(rbx);
 346     __ cmpptr(r15_thread, thread);
 347     __ jcc(Assembler::equal, L2);
 348     __ stop("StubRoutines::call_stub: r15_thread is modified by call");
 349     __ bind(L2);
 350     __ cmpptr(r15_thread, rbx);

 368   __ movptr(r13, r13_save);
 369   __ movptr(r12, r12_save);
 370   __ movptr(rbx, rbx_save);
 371 
 372 #ifdef _WIN64
 373   __ movptr(rdi, rdi_save);
 374   __ movptr(rsi, rsi_save);
 375 #else
 376   __ ldmxcsr(mxcsr_save);
 377 #endif
 378 
 379   // restore rsp
 380   __ addptr(rsp, -rsp_after_call_off * wordSize);
 381 
 382   // return
 383   __ vzeroupper();
 384   __ pop(rbp);
 385   __ ret(0);
 386 
 387   // handle return types different from T_INT
 388   __ BIND(is_long);
 389   __ movq(Address(c_rarg0, 0), rax);
 390   __ jmp(exit);
 391 
 392   __ BIND(is_float);
 393   __ movflt(Address(c_rarg0, 0), xmm0);
 394   __ jmp(exit);
 395 
 396   __ BIND(is_double);
 397   __ movdbl(Address(c_rarg0, 0), xmm0);
 398   __ jmp(exit);
 399 
 400   return start;
 401 }
 402 
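For readers less used to the assembly above: the result store is an ordinary type dispatch. The sketch below is an illustration only (plain C++ with made-up names, not VM code); it mirrors how the stub writes through the caller-supplied result pointer with a width chosen by result_type, treats T_OBJECT like T_LONG, and narrows every remaining type to a 32-bit T_INT store.

#include <cstdint>

// Illustrative stand-in for HotSpot's BasicType tags (values are not the real ones).
enum BasicType { T_INT, T_LONG, T_OBJECT, T_FLOAT, T_DOUBLE };

// Sketch of the store performed after the Java call returns: rax carries
// integer and oop results, xmm0 carries floating-point results.
static void store_call_stub_result(void* result, BasicType result_type,
                                   int64_t rax, float xmm0_f, double xmm0_d) {
  switch (result_type) {
    case T_OBJECT:                                    // an oop is pointer-sized,
    case T_LONG:   *(int64_t*)result = rax;    break; // so it is stored like T_LONG
    case T_FLOAT:  *(float*)result   = xmm0_f; break;
    case T_DOUBLE: *(double*)result  = xmm0_d; break;
    default:       *(int32_t*)result = (int32_t)rax; break; // everything else as T_INT
  }
}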
 403 // Return point for a Java call if there's an exception thrown in
 404 // Java code.  The exception is caught and transformed into a
 405 // pending exception stored in JavaThread that can be tested from
 406 // within the VM.
 407 //
 408 // Note: Usually the parameters are removed by the callee. In case
 409 // of an exception crossing an activation frame boundary, that is
 410 // not the case if the callee is compiled code => need to setup the
 411 // rsp.
 412 //
 413 // rax: exception oop
 414 
 415 address StubGenerator::generate_catch_exception() {
 416   StubCodeMark mark(this, "StubRoutines", "catch_exception");
 417   address start = __ pc();

3910 // Initialization
3911 void StubGenerator::generate_initial_stubs() {
3912   // Generates all stubs and initializes the entry points
3913 
3914   // These platform-specific settings are needed by generate_call_stub()
3915   create_control_words();
3916 
3917   // Initialize table for unsafe copy memory check.
3918   if (UnsafeMemoryAccess::_table == nullptr) {
3919     UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3920   }
3921 
3922   // entry points that exist in all platforms. Note: This is code
3923   // that could be shared among different platforms - however the
3924   // benefit seems to be smaller than the disadvantage of having a
3925   // much more complicated generator structure. See also comment in
3926   // stubRoutines.hpp.
3927 
3928   StubRoutines::_forward_exception_entry = generate_forward_exception();
3929 
3930   StubRoutines::_call_stub_entry =
3931     generate_call_stub(StubRoutines::_call_stub_return_address);
3932 
3933   // is referenced by megamorphic call
3934   StubRoutines::_catch_exception_entry = generate_catch_exception();
3935 
3936   // atomic calls
3937   StubRoutines::_fence_entry                = generate_orderaccess_fence();
3938 
3939   // platform dependent
3940   StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3941 
3942   StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
3943 
3944   StubRoutines::x86::_f2i_fixup             = generate_f2i_fixup();
3945   StubRoutines::x86::_f2l_fixup             = generate_f2l_fixup();
3946   StubRoutines::x86::_d2i_fixup             = generate_d2i_fixup();
3947   StubRoutines::x86::_d2l_fixup             = generate_d2l_fixup();
3948 
3949   StubRoutines::x86::_float_sign_mask       = generate_fp_mask("float_sign_mask",  0x7FFFFFFF7FFFFFFF);

3962     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3963     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3964     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3965   }
3966 
3967   if (VM_Version::supports_float16()) {
3968     // For consistency of results, both intrinsics should be enabled.
3969     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3970     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3971         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3972       StubRoutines::_hf2f = generate_float16ToFloat();
3973       StubRoutines::_f2hf = generate_floatToFloat16();
3974     }
3975   }
3976 
3977   generate_libm_stubs();
3978 
3979   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3980 }
3981 
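As a side note on the CRC32C wiring above: _updateBytesCRC32C computes CRC-32C (the Castagnoli polynomial, reflected form 0x82F63B78), accelerated with CLMUL when supports_clmul is true. A bit-at-a-time reference in portable C++, useful only for sanity-checking outputs against the stub (this is not the stub's algorithm), could look like this:

#include <cstddef>
#include <cstdint>

// Bit-at-a-time CRC-32C reference (reflected polynomial 0x82F63B78).
uint32_t crc32c_reference(uint32_t crc, const uint8_t* buf, size_t len) {
  crc = ~crc;                            // CRC-32C is computed with inverted start/end
  for (size_t i = 0; i < len; i++) {
    crc ^= buf[i];
    for (int k = 0; k < 8; k++) {
      crc = (crc >> 1) ^ (0x82F63B78u & (0u - (crc & 1u)));
    }
  }
  return ~crc;
}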
3982 void StubGenerator::generate_continuation_stubs() {
3983   // Continuation stubs:
3984   StubRoutines::_cont_thaw          = generate_cont_thaw();
3985   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3986   StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3987   StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
3988 }
3989 
3990 void StubGenerator::generate_final_stubs() {
3991   // Generates the rest of the stubs and initializes the entry points
3992 
3993   // support for verify_oop (must happen after universe_init)
3994   if (VerifyOops) {
3995     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3996   }
3997 
3998   // data cache line writeback
3999   StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
4000   StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
4001 

   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/assembler.hpp"
  27 #include "asm/macroAssembler.hpp"
  28 #include "classfile/javaClasses.hpp"
  29 #include "classfile/vmIntrinsics.hpp"
  30 #include "compiler/oopMap.hpp"
  31 #include "gc/shared/barrierSet.hpp"
  32 #include "gc/shared/barrierSetAssembler.hpp"
  33 #include "gc/shared/barrierSetNMethod.hpp"
  34 #include "gc/shared/gc_globals.hpp"
  35 #include "memory/universe.hpp"
  36 #include "prims/jvmtiExport.hpp"
  37 #include "prims/upcallLinker.hpp"
  38 #include "runtime/arguments.hpp"
  39 #include "runtime/continuationEntry.hpp"
  40 #include "runtime/javaThread.hpp"
  41 #include "runtime/sharedRuntime.hpp"
  42 #include "runtime/stubRoutines.hpp"
  43 #include "utilities/macros.hpp"
  44 #include "vmreg_x86.inline.hpp"
  45 #include "stubGenerator_x86_64.hpp"
  46 #ifdef COMPILER2
  47 #include "opto/runtime.hpp"
  48 #include "opto/c2_globals.hpp"
  49 #endif
  50 #if INCLUDE_JVMCI
  51 #include "jvmci/jvmci_globals.hpp"
  52 #endif
  53 
  54 // For a more detailed description of the stub routine structure
  55 // see the comment in stubRoutines.hpp
  56 
  57 #define __ _masm->
  58 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
  59 
  60 #ifdef PRODUCT
  61 #define BLOCK_COMMENT(str) /* nothing */
  62 #else
  63 #define BLOCK_COMMENT(str) __ block_comment(str)
  64 #endif // PRODUCT

 290   __ BIND(loop);
 291   __ movptr(rax, Address(c_rarg2, 0));// get parameter
 292   __ addptr(c_rarg2, wordSize);       // advance to next parameter
 293   __ decrementl(c_rarg1);             // decrement counter
 294   __ push(rax);                       // pass parameter
 295   __ jcc(Assembler::notZero, loop);
 296 
 297   // call Java function
 298   __ BIND(parameters_done);
 299   __ movptr(rbx, method);             // get Method*
 300   __ movptr(c_rarg1, entry_point);    // get entry_point
 301   __ mov(r13, rsp);                   // set sender sp
 302   BLOCK_COMMENT("call Java function");
 303   __ call(c_rarg1);
 304 
 305   BLOCK_COMMENT("call_stub_return_address:");
 306   return_address = __ pc();
 307 
 308   // store result depending on type (everything that is not
 309   // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 310   __ movptr(r13, result);
 311   Label is_long, is_float, is_double, check_prim, exit;
 312   __ movl(rbx, result_type);
 313   __ cmpl(rbx, T_OBJECT);
 314   __ jcc(Assembler::equal, check_prim);
 315   __ cmpl(rbx, T_LONG);
 316   __ jcc(Assembler::equal, is_long);
 317   __ cmpl(rbx, T_FLOAT);


 318   __ jcc(Assembler::equal, is_float);
 319   __ cmpl(rbx, T_DOUBLE);
 320   __ jcc(Assembler::equal, is_double);
 321 #ifdef ASSERT
 322   // make sure the type is INT
 323   {
 324     Label L;
 325     __ cmpl(rbx, T_INT);
 326     __ jcc(Assembler::equal, L);
 327     __ stop("StubRoutines::call_stub: unexpected result type");
 328     __ bind(L);
 329   }
 330 #endif
 331 
 332   // handle T_INT case
 333   __ movl(Address(r13, 0), rax);
 334 
 335   __ BIND(exit);
 336 
 337   // pop parameters
 338   __ lea(rsp, rsp_after_call);
 339 
 340 #ifdef ASSERT
 341   // verify that threads correspond
 342   {
 343     Label L1, L2, L3;
 344     __ cmpptr(r15_thread, thread);
 345     __ jcc(Assembler::equal, L1);
 346     __ stop("StubRoutines::call_stub: r15_thread is corrupted");
 347     __ bind(L1);
 348     __ get_thread(rbx);
 349     __ cmpptr(r15_thread, thread);
 350     __ jcc(Assembler::equal, L2);
 351     __ stop("StubRoutines::call_stub: r15_thread is modified by call");
 352     __ bind(L2);
 353     __ cmpptr(r15_thread, rbx);

 371   __ movptr(r13, r13_save);
 372   __ movptr(r12, r12_save);
 373   __ movptr(rbx, rbx_save);
 374 
 375 #ifdef _WIN64
 376   __ movptr(rdi, rdi_save);
 377   __ movptr(rsi, rsi_save);
 378 #else
 379   __ ldmxcsr(mxcsr_save);
 380 #endif
 381 
 382   // restore rsp
 383   __ addptr(rsp, -rsp_after_call_off * wordSize);
 384 
 385   // return
 386   __ vzeroupper();
 387   __ pop(rbp);
 388   __ ret(0);
 389 
 390   // handle return types different from T_INT
 391   __ BIND(check_prim);
 392   if (InlineTypeReturnedAsFields) {
 393     // Check for scalarized return value
 394     __ testptr(rax, 1);
 395     __ jcc(Assembler::zero, is_long);
 396     // Load pack handler address
 397     __ andptr(rax, -2);
 398     __ movptr(rax, Address(rax, InstanceKlass::adr_inlineklass_fixed_block_offset()));
 399     __ movptr(rbx, Address(rax, InlineKlass::pack_handler_jobject_offset()));
 400     // Call pack handler to initialize the buffer
 401     __ call(rbx);
 402     __ jmp(exit);
 403   }
 404   __ BIND(is_long);
 405   __ movq(Address(r13, 0), rax);
 406   __ jmp(exit);
 407 
 408   __ BIND(is_float);
 409   __ movflt(Address(r13, 0), xmm0);
 410   __ jmp(exit);
 411 
 412   __ BIND(is_double);
 413   __ movdbl(Address(r13, 0), xmm0);
 414   __ jmp(exit);
 415 
 416   return start;
 417 }
 418 
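The check_prim path added above separates an ordinary oop result from a scalarized inline-type return: a set low bit in rax marks it as a tagged klass pointer whose pack handler must first buffer the field values returned in registers. A hypothetical model of the tag handling (the type and member names below are illustrative, not VM API):

#include <cstdint>

// Hypothetical model of the value left in rax when InlineTypeReturnedAsFields
// is enabled and the callee returned an inline type.
struct JavaCallResult {
  uintptr_t rax;

  // Low bit set: the fields came back scalarized in registers and rax holds
  // a tagged klass pointer instead of an oop (testptr(rax, 1) in the stub).
  bool is_scalarized() const { return (rax & 1u) != 0; }

  // Clearing the tag (andptr(rax, -2) in the stub) recovers the klass pointer,
  // from which the InlineKlass pack handler is loaded and called; the handler
  // packs the register values into a heap buffer and leaves its oop in rax,
  // after which the result is stored exactly like a T_LONG/T_OBJECT value.
  uintptr_t klass_bits() const { return rax & ~uintptr_t(1); }
};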
 419 // Return point for a Java call if there's an exception thrown in
 420 // Java code.  The exception is caught and transformed into a
 421 // pending exception stored in JavaThread that can be tested from
 422 // within the VM.
 423 //
 424 // Note: Usually the parameters are removed by the callee. In case
 425 // of an exception crossing an activation frame boundary, that is
 426 // not the case if the callee is compiled code => need to setup the
 427 // rsp.
 428 //
 429 // rax: exception oop
 430 
 431 address StubGenerator::generate_catch_exception() {
 432   StubCodeMark mark(this, "StubRoutines", "catch_exception");
 433   address start = __ pc();

3926 // Initialization
3927 void StubGenerator::generate_initial_stubs() {
3928   // Generates all stubs and initializes the entry points
3929 
3930   // These platform-specific settings are needed by generate_call_stub()
3931   create_control_words();
3932 
3933   // Initialize table for unsafe copy memory check.
3934   if (UnsafeMemoryAccess::_table == nullptr) {
3935     UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3936   }
3937 
3938   // entry points that exist in all platforms. Note: This is code
3939   // that could be shared among different platforms - however the
3940   // benefit seems to be smaller than the disadvantage of having a
3941   // much more complicated generator structure. See also comment in
3942   // stubRoutines.hpp.
3943 
3944   StubRoutines::_forward_exception_entry = generate_forward_exception();
3945 
3946   // Generate these first because they are called from other stubs
3947   if (InlineTypeReturnedAsFields) {
3948     StubRoutines::_load_inline_type_fields_in_regs =
3949       generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_inline_type_fields_in_regs),
3950                                  "load_inline_type_fields_in_regs", false);
3951     StubRoutines::_store_inline_type_fields_to_buf =
3952       generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_inline_type_fields_to_buf),
3953                                  "store_inline_type_fields_to_buf", true);
3954   }
3955 
3956   StubRoutines::_call_stub_entry =
3957     generate_call_stub(StubRoutines::_call_stub_return_address);
3958 
3959   // is referenced by megamorphic call
3960   StubRoutines::_catch_exception_entry = generate_catch_exception();
3961 
3962   // atomic calls
3963   StubRoutines::_fence_entry                = generate_orderaccess_fence();
3964 
3965   // platform dependent
3966   StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3967 
3968   StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
3969 
3970   StubRoutines::x86::_f2i_fixup             = generate_f2i_fixup();
3971   StubRoutines::x86::_f2l_fixup             = generate_f2l_fixup();
3972   StubRoutines::x86::_d2i_fixup             = generate_d2i_fixup();
3973   StubRoutines::x86::_d2l_fixup             = generate_d2l_fixup();
3974 
3975   StubRoutines::x86::_float_sign_mask       = generate_fp_mask("float_sign_mask",  0x7FFFFFFF7FFFFFFF);

3988     StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3989     StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3990     StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3991   }
3992 
3993   if (VM_Version::supports_float16()) {
3994     // For consistency of results, both intrinsics should be enabled.
3995     // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3996     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3997         vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3998       StubRoutines::_hf2f = generate_float16ToFloat();
3999       StubRoutines::_f2hf = generate_floatToFloat16();
4000     }
4001   }
4002 
4003   generate_libm_stubs();
4004 
4005   StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
4006 }
4007 
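The paired _hf2f/_f2hf stubs generated above rely on hardware float16 conversion support (VM_Version::supports_float16()) and are only installed together so that Float.float16ToFloat and Float.floatToFloat16 stay mutually consistent. Purely as a reference for the conversion semantics (not the stub's code), a scalar binary16-to-binary32 widening can be written in portable C++:

#include <cstdint>
#include <cstring>

// Scalar reference for widening an IEEE 754 binary16 value to binary32.
static float hf2f_reference(uint16_t h) {
  uint32_t sign = (uint32_t)(h >> 15) << 31;
  uint32_t exp  = (h >> 10) & 0x1F;
  uint32_t mant = h & 0x3FF;
  uint32_t bits;
  if (exp == 0x1F) {                        // Inf or NaN: exponent stays all ones
    bits = sign | 0x7F800000u | (mant << 13);
  } else if (exp == 0) {
    if (mant == 0) {
      bits = sign;                          // signed zero
    } else {                                // subnormal: renormalize the mantissa
      int shifts = 0;
      while ((mant & 0x400) == 0) { mant <<= 1; shifts++; }
      uint32_t e = (uint32_t)(127 - 15 + 1 - shifts);
      bits = sign | (e << 23) | ((mant & 0x3FF) << 13);
    }
  } else {                                  // normal: rebias exponent from 15 to 127
    bits = sign | ((exp - 15 + 127) << 23) | (mant << 13);
  }
  float f;
  std::memcpy(&f, &bits, sizeof f);
  return f;
}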
4008 // Call here from the interpreter or compiled code either to load the
4009 // multiple returned values of an inline type instance being returned
4010 // into registers, or to store the returned values into a newly
4011 // allocated inline type instance.
4012 // Register is a class, but it will be assigned a numerical value.
4013 // "0" is assigned to xmm0, so we need to ignore -Wnonnull.
4014 PRAGMA_DIAG_PUSH
4015 PRAGMA_NONNULL_IGNORED
4016 address StubGenerator::generate_return_value_stub(address destination, const char* name, bool has_res) {
4017   // We need to save all registers the calling convention may use so
4018   // the runtime calls read or update those registers. This needs to
4019   // be in sync with SharedRuntime::java_return_convention().
4020   enum layout {
4021     pad_off = frame::arg_reg_save_area_bytes/BytesPerInt, pad_off_2,
4022     rax_off, rax_off_2,
4023     j_rarg5_off, j_rarg5_2,
4024     j_rarg4_off, j_rarg4_2,
4025     j_rarg3_off, j_rarg3_2,
4026     j_rarg2_off, j_rarg2_2,
4027     j_rarg1_off, j_rarg1_2,
4028     j_rarg0_off, j_rarg0_2,
4029     j_farg0_off, j_farg0_2,
4030     j_farg1_off, j_farg1_2,
4031     j_farg2_off, j_farg2_2,
4032     j_farg3_off, j_farg3_2,
4033     j_farg4_off, j_farg4_2,
4034     j_farg5_off, j_farg5_2,
4035     j_farg6_off, j_farg6_2,
4036     j_farg7_off, j_farg7_2,
4037     rbp_off, rbp_off_2,
4038     return_off, return_off_2,
4039 
4040     framesize
4041   };
4042 
4043   CodeBuffer buffer(name, 1000, 512);
4044   MacroAssembler* _masm = new MacroAssembler(&buffer);
4045 
4046   int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
4047   assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
4048   int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
4049   int frame_size_in_words = frame_size_in_bytes / wordSize;
4050 
4051   OopMapSet *oop_maps = new OopMapSet();
4052   OopMap* map = new OopMap(frame_size_in_slots, 0);
4053 
4054   map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
4055   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
4056   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
4057   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
4058   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
4059   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
4060   map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
4061   map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
4062   map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
4063   map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
4064   map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
4065   map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
4066   map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
4067   map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
4068   map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
4069 
4070   int start = __ offset();
4071 
4072   __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
4073 
4074   __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
4075   __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
4076   __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
4077   __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
4078   __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
4079   __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
4080   __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
4081   __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
4082   __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
4083 
4084   __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
4085   __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
4086   __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
4087   __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
4088   __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
4089   __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
4090   __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
4091 
4092   int frame_complete = __ offset();
4093 
4094   __ set_last_Java_frame(noreg, noreg, nullptr, rscratch1);
4095 
4096   __ mov(c_rarg0, r15_thread);
4097   __ mov(c_rarg1, rax);
4098 
4099   __ call(RuntimeAddress(destination));
4100 
4101   // Set an oopmap for the call site.
4102 
4103   oop_maps->add_gc_map( __ offset() - start, map);
4104 
4105   // clear last_Java_sp
4106   __ reset_last_Java_frame(false);
4107 
4108   __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
4109   __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
4110   __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
4111   __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
4112   __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
4113   __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
4114   __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
4115   __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
4116   __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
4117 
4118   __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
4119   __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
4120   __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
4121   __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
4122   __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
4123   __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
4124   __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
4125 
4126   __ addptr(rsp, frame_size_in_bytes-8);
4127 
4128   // check for pending exceptions
4129   Label pending;
4130   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
4131   __ jcc(Assembler::notEqual, pending);
4132 
4133   if (has_res) {
4134     __ get_vm_result(rax, r15_thread);
4135   }
4136 
4137   __ ret(0);
4138 
4139   __ bind(pending);
4140 
4141   __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
4142   __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
4143 
4144   // -------------
4145   // make sure all code is generated
4146   _masm->flush();
4147 
4148   RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
4149   return stub->entry_point();
4150 }
4151 
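A note on the layout enum used by generate_return_value_stub above: every saved register occupies two 4-byte slots, the saves and restores address the slots as slot_index * BytesPerInt from rsp, and the prologue reserves frame_size_in_bytes - 8 because the call has already pushed the 8-byte return address. A minimal sketch of the slot arithmetic, assuming BytesPerInt == 4 and a non-Windows frame::arg_reg_save_area_bytes of 0 (it is non-zero on Windows x64):

#include <cstdio>

int main() {
  const int BytesPerInt = 4;
  const int arg_reg_save_area_bytes = 0;   // assumption: non-Windows frame layout

  // First few members of the stub's layout enum; one register = two slots.
  enum layout {
    pad_off = arg_reg_save_area_bytes / BytesPerInt, pad_off_2,
    rax_off, rax_off_2,
    j_rarg5_off, j_rarg5_2,
    j_rarg4_off, j_rarg4_2
    // ... remaining register pairs elided ...
  };

  // Matches movptr(Address(rsp, rax_off * BytesPerInt), rax) and friends.
  printf("rax     saved at [rsp + %d]\n", rax_off     * BytesPerInt); // [rsp + 8]
  printf("j_rarg5 saved at [rsp + %d]\n", j_rarg5_off * BytesPerInt); // [rsp + 16]
  printf("j_rarg4 saved at [rsp + %d]\n", j_rarg4_off * BytesPerInt); // [rsp + 24]
  return 0;
}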
4152 void StubGenerator::generate_continuation_stubs() {
4153   // Continuation stubs:
4154   StubRoutines::_cont_thaw          = generate_cont_thaw();
4155   StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
4156   StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
4157   StubRoutines::_cont_preempt_stub = generate_cont_preempt_stub();
4158 }
4159 
4160 void StubGenerator::generate_final_stubs() {
4161   // Generates the rest of the stubs and initializes the entry points
4162 
4163   // support for verify_oop (must happen after universe_init)
4164   if (VerifyOops) {
4165     StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
4166   }
4167 
4168   // data cache line writeback
4169   StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
4170   StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
4171 