6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "classfile/vmIntrinsics.hpp"
28 #include "compiler/oopMap.hpp"
29 #include "gc/shared/barrierSet.hpp"
30 #include "gc/shared/barrierSetAssembler.hpp"
31 #include "gc/shared/barrierSetNMethod.hpp"
32 #include "gc/shared/gc_globals.hpp"
33 #include "memory/universe.hpp"
34 #include "prims/jvmtiExport.hpp"
35 #include "runtime/arguments.hpp"
36 #include "runtime/javaThread.hpp"
37 #include "runtime/sharedRuntime.hpp"
38 #include "runtime/stubRoutines.hpp"
39 #include "stubGenerator_x86_64.hpp"
40 #ifdef COMPILER2
41 #include "opto/runtime.hpp"
42 #include "opto/c2_globals.hpp"
43 #endif
44 #if INCLUDE_JVMCI
45 #include "jvmci/jvmci_globals.hpp"
46 #endif
47 #if INCLUDE_JFR
48 #include "jfr/support/jfrIntrinsics.hpp"
49 #endif
50
51 // For a more detailed description of the stub routine structure
52 // see the comment in stubRoutines.hpp
53
54 #define __ _masm->
55 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
56
57 #ifdef PRODUCT
58 #define BLOCK_COMMENT(str) /* nothing */
297 __ movl(c_rarg1, c_rarg3); // parameter counter is in c_rarg1
298 __ BIND(loop);
299 __ movptr(rax, Address(c_rarg2, 0));// get parameter
300 __ addptr(c_rarg2, wordSize); // advance to next parameter
301 __ decrementl(c_rarg1); // decrement counter
302 __ push(rax); // pass parameter
303 __ jcc(Assembler::notZero, loop);
304
305 // call Java function
306 __ BIND(parameters_done);
307 __ movptr(rbx, method); // get Method*
308 __ movptr(c_rarg1, entry_point); // get entry_point
309 __ mov(r13, rsp); // set sender sp
310 BLOCK_COMMENT("call Java function");
311 __ call(c_rarg1);
312
313 BLOCK_COMMENT("call_stub_return_address:");
314 return_address = __ pc();
315
316 // store result depending on type (everything that is not
317 // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
318 __ movptr(c_rarg0, result);
319 Label is_long, is_float, is_double, exit;
320 __ movl(c_rarg1, result_type);
321 __ cmpl(c_rarg1, T_OBJECT);
322 __ jcc(Assembler::equal, is_long);
323 __ cmpl(c_rarg1, T_LONG);
324 __ jcc(Assembler::equal, is_long);
325 __ cmpl(c_rarg1, T_FLOAT);
326 __ jcc(Assembler::equal, is_float);
327 __ cmpl(c_rarg1, T_DOUBLE);
328 __ jcc(Assembler::equal, is_double);
329
330 // handle T_INT case
331 __ movl(Address(c_rarg0, 0), rax);
332
333 __ BIND(exit);
334
335 // pop parameters
336 __ lea(rsp, rsp_after_call);
337
338 #ifdef ASSERT
339 // verify that threads correspond
340 {
341 Label L1, L2, L3;
342 __ cmpptr(r15_thread, thread);
343 __ jcc(Assembler::equal, L1);
344 __ stop("StubRoutines::call_stub: r15_thread is corrupted");
345 __ bind(L1);
346 __ get_thread(rbx);
347 __ cmpptr(r15_thread, thread);
348 __ jcc(Assembler::equal, L2);
349 __ stop("StubRoutines::call_stub: r15_thread is modified by call");
350 __ bind(L2);
351 __ cmpptr(r15_thread, rbx);
375 __ movptr(r13, r13_save);
376 __ movptr(r12, r12_save);
377 __ movptr(rbx, rbx_save);
378
379 #ifdef _WIN64
380 __ movptr(rdi, rdi_save);
381 __ movptr(rsi, rsi_save);
382 #else
383 __ ldmxcsr(mxcsr_save);
384 #endif
385
386 // restore rsp
387 __ addptr(rsp, -rsp_after_call_off * wordSize);
388
389 // return
390 __ vzeroupper();
391 __ pop(rbp);
392 __ ret(0);
393
394 // handle return types different from T_INT
395 __ BIND(is_long);
396 __ movq(Address(c_rarg0, 0), rax);
397 __ jmp(exit);
398
399 __ BIND(is_float);
400 __ movflt(Address(c_rarg0, 0), xmm0);
401 __ jmp(exit);
402
403 __ BIND(is_double);
404 __ movdbl(Address(c_rarg0, 0), xmm0);
405 __ jmp(exit);
406
407 return start;
408 }
409
410 // Return point for a Java call if there's an exception thrown in
411 // Java code. The exception is caught and transformed into a
412 // pending exception stored in JavaThread that can be tested from
413 // within the VM.
414 //
415 // Note: Usually the parameters are removed by the callee. In case
416 // of an exception crossing an activation frame boundary, that is
417 // not the case if the callee is compiled code => need to setup the
418 // rsp.
419 //
420 // rax: exception oop
421
422 address StubGenerator::generate_catch_exception() {
423 StubCodeMark mark(this, "StubRoutines", "catch_exception");
424 address start = __ pc();
3902 // Initialization
3903 void StubGenerator::generate_initial_stubs() {
3904 // Generates all stubs and initializes the entry points
3905
3906 // This platform-specific settings are needed by generate_call_stub()
3907 create_control_words();
3908
3909   // Initialize table for unsafe copy memory check.
3910 if (UnsafeCopyMemory::_table == nullptr) {
3911 UnsafeCopyMemory::create_table(16);
3912 }
3913
3914 // entry points that exist in all platforms Note: This is code
3915 // that could be shared among different platforms - however the
3916 // benefit seems to be smaller than the disadvantage of having a
3917 // much more complicated generator structure. See also comment in
3918 // stubRoutines.hpp.
3919
3920 StubRoutines::_forward_exception_entry = generate_forward_exception();
3921
3922 StubRoutines::_call_stub_entry =
3923 generate_call_stub(StubRoutines::_call_stub_return_address);
3924
3925 // is referenced by megamorphic call
3926 StubRoutines::_catch_exception_entry = generate_catch_exception();
3927
3928 // atomic calls
3929 StubRoutines::_fence_entry = generate_orderaccess_fence();
3930
3931 // platform dependent
3932 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3933
3934 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
3935
3936 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
3937 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
3938 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
3939 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
3940
3941 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
3967 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3968 }
3969
3970 if (VM_Version::supports_float16()) {
3971 // For results consistency both intrinsics should be enabled.
3972 // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
3973 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3974 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3975 StubRoutines::_hf2f = generate_float16ToFloat();
3976 StubRoutines::_f2hf = generate_floatToFloat16();
3977 }
3978 }
3979
3980 generate_libm_stubs();
3981
3982 if ((UseAVX >= 1) && (VM_Version::supports_avx512vlbwdq() || VM_Version::supports_fma())) {
3983 StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3984 }
3985 }
3986
3987 void StubGenerator::generate_continuation_stubs() {
3988 // Continuation stubs:
3989 StubRoutines::_cont_thaw = generate_cont_thaw();
3990 StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3991 StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3992
3993 JFR_ONLY(generate_jfr_stubs();)
3994 }
3995
#if INCLUDE_JFR
// Emit the JFR support stubs; keep both the RuntimeStub* (for code
// management) and the raw entry point (for fast calls) in StubRoutines.
void StubGenerator::generate_jfr_stubs() {
  StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
  StubRoutines::_jfr_write_checkpoint      = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
  StubRoutines::_jfr_return_lease_stub     = generate_jfr_return_lease();
  StubRoutines::_jfr_return_lease          = StubRoutines::_jfr_return_lease_stub->entry_point();
}
#endif // INCLUDE_JFR
4004
4005 void StubGenerator::generate_final_stubs() {
4006 // Generates the rest of stubs and initializes the entry points
|
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.hpp"
27 #include "asm/macroAssembler.hpp"
28 #include "classfile/vmIntrinsics.hpp"
29 #include "compiler/oopMap.hpp"
30 #include "gc/shared/barrierSet.hpp"
31 #include "gc/shared/barrierSetAssembler.hpp"
32 #include "gc/shared/barrierSetNMethod.hpp"
33 #include "gc/shared/gc_globals.hpp"
34 #include "memory/universe.hpp"
35 #include "prims/jvmtiExport.hpp"
36 #include "runtime/arguments.hpp"
37 #include "runtime/javaThread.hpp"
38 #include "runtime/sharedRuntime.hpp"
39 #include "runtime/stubRoutines.hpp"
40 #include "utilities/macros.hpp"
41 #include "vmreg_x86.inline.hpp"
42 #include "stubGenerator_x86_64.hpp"
43 #ifdef COMPILER2
44 #include "opto/runtime.hpp"
45 #include "opto/c2_globals.hpp"
46 #endif
47 #if INCLUDE_JVMCI
48 #include "jvmci/jvmci_globals.hpp"
49 #endif
50 #if INCLUDE_JFR
51 #include "jfr/support/jfrIntrinsics.hpp"
52 #endif
53
54 // For a more detailed description of the stub routine structure
55 // see the comment in stubRoutines.hpp
56
57 #define __ _masm->
58 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
59
60 #ifdef PRODUCT
61 #define BLOCK_COMMENT(str) /* nothing */
300 __ movl(c_rarg1, c_rarg3); // parameter counter is in c_rarg1
301 __ BIND(loop);
302 __ movptr(rax, Address(c_rarg2, 0));// get parameter
303 __ addptr(c_rarg2, wordSize); // advance to next parameter
304 __ decrementl(c_rarg1); // decrement counter
305 __ push(rax); // pass parameter
306 __ jcc(Assembler::notZero, loop);
307
308 // call Java function
309 __ BIND(parameters_done);
310 __ movptr(rbx, method); // get Method*
311 __ movptr(c_rarg1, entry_point); // get entry_point
312 __ mov(r13, rsp); // set sender sp
313 BLOCK_COMMENT("call Java function");
314 __ call(c_rarg1);
315
316 BLOCK_COMMENT("call_stub_return_address:");
317 return_address = __ pc();
318
319 // store result depending on type (everything that is not
320 // T_OBJECT, T_PRIMITIVE_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
321 __ movptr(r13, result);
322 Label is_long, is_float, is_double, check_prim, exit;
323 __ movl(rbx, result_type);
324 __ cmpl(rbx, T_OBJECT);
325 __ jcc(Assembler::equal, check_prim);
326 __ cmpl(rbx, T_PRIMITIVE_OBJECT);
327 __ jcc(Assembler::equal, check_prim);
328 __ cmpl(rbx, T_LONG);
329 __ jcc(Assembler::equal, is_long);
330 __ cmpl(rbx, T_FLOAT);
331 __ jcc(Assembler::equal, is_float);
332 __ cmpl(rbx, T_DOUBLE);
333 __ jcc(Assembler::equal, is_double);
334
335 // handle T_INT case
336 __ movl(Address(r13, 0), rax);
337
338 __ BIND(exit);
339
340 // pop parameters
341 __ lea(rsp, rsp_after_call);
342
343 #ifdef ASSERT
344 // verify that threads correspond
345 {
346 Label L1, L2, L3;
347 __ cmpptr(r15_thread, thread);
348 __ jcc(Assembler::equal, L1);
349 __ stop("StubRoutines::call_stub: r15_thread is corrupted");
350 __ bind(L1);
351 __ get_thread(rbx);
352 __ cmpptr(r15_thread, thread);
353 __ jcc(Assembler::equal, L2);
354 __ stop("StubRoutines::call_stub: r15_thread is modified by call");
355 __ bind(L2);
356 __ cmpptr(r15_thread, rbx);
380 __ movptr(r13, r13_save);
381 __ movptr(r12, r12_save);
382 __ movptr(rbx, rbx_save);
383
384 #ifdef _WIN64
385 __ movptr(rdi, rdi_save);
386 __ movptr(rsi, rsi_save);
387 #else
388 __ ldmxcsr(mxcsr_save);
389 #endif
390
391 // restore rsp
392 __ addptr(rsp, -rsp_after_call_off * wordSize);
393
394 // return
395 __ vzeroupper();
396 __ pop(rbp);
397 __ ret(0);
398
399 // handle return types different from T_INT
400 __ BIND(check_prim);
401 if (InlineTypeReturnedAsFields) {
402 // Check for scalarized return value
403 __ testptr(rax, 1);
404 __ jcc(Assembler::zero, is_long);
405 // Load pack handler address
406 __ andptr(rax, -2);
407 __ movptr(rax, Address(rax, InstanceKlass::adr_inlineklass_fixed_block_offset()));
408 __ movptr(rbx, Address(rax, InlineKlass::pack_handler_jobject_offset()));
409 // Call pack handler to initialize the buffer
410 __ call(rbx);
411 __ jmp(exit);
412 }
413 __ BIND(is_long);
414 __ movq(Address(r13, 0), rax);
415 __ jmp(exit);
416
417 __ BIND(is_float);
418 __ movflt(Address(r13, 0), xmm0);
419 __ jmp(exit);
420
421 __ BIND(is_double);
422 __ movdbl(Address(r13, 0), xmm0);
423 __ jmp(exit);
424
425 return start;
426 }
427
428 // Return point for a Java call if there's an exception thrown in
429 // Java code. The exception is caught and transformed into a
430 // pending exception stored in JavaThread that can be tested from
431 // within the VM.
432 //
433 // Note: Usually the parameters are removed by the callee. In case
434 // of an exception crossing an activation frame boundary, that is
435 // not the case if the callee is compiled code => need to setup the
436 // rsp.
437 //
438 // rax: exception oop
439
440 address StubGenerator::generate_catch_exception() {
441 StubCodeMark mark(this, "StubRoutines", "catch_exception");
442 address start = __ pc();
3920 // Initialization
3921 void StubGenerator::generate_initial_stubs() {
3922 // Generates all stubs and initializes the entry points
3923
3924 // This platform-specific settings are needed by generate_call_stub()
3925 create_control_words();
3926
3927   // Initialize table for unsafe copy memory check.
3928 if (UnsafeCopyMemory::_table == nullptr) {
3929 UnsafeCopyMemory::create_table(16);
3930 }
3931
3932 // entry points that exist in all platforms Note: This is code
3933 // that could be shared among different platforms - however the
3934 // benefit seems to be smaller than the disadvantage of having a
3935 // much more complicated generator structure. See also comment in
3936 // stubRoutines.hpp.
3937
3938 StubRoutines::_forward_exception_entry = generate_forward_exception();
3939
3940 // Generate these first because they are called from other stubs
3941 if (InlineTypeReturnedAsFields) {
3942 StubRoutines::_load_inline_type_fields_in_regs =
3943 generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_inline_type_fields_in_regs),
3944 "load_inline_type_fields_in_regs", false);
3945 StubRoutines::_store_inline_type_fields_to_buf =
3946 generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_inline_type_fields_to_buf),
3947 "store_inline_type_fields_to_buf", true);
3948 }
3949
3950 StubRoutines::_call_stub_entry =
3951 generate_call_stub(StubRoutines::_call_stub_return_address);
3952
3953 // is referenced by megamorphic call
3954 StubRoutines::_catch_exception_entry = generate_catch_exception();
3955
3956 // atomic calls
3957 StubRoutines::_fence_entry = generate_orderaccess_fence();
3958
3959 // platform dependent
3960 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3961
3962 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
3963
3964 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
3965 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
3966 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
3967 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
3968
3969 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
3995 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3996 }
3997
3998 if (VM_Version::supports_float16()) {
3999 // For results consistency both intrinsics should be enabled.
4000 // vmIntrinsics checks InlineIntrinsics flag, no need to check it here.
4001 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
4002 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
4003 StubRoutines::_hf2f = generate_float16ToFloat();
4004 StubRoutines::_f2hf = generate_floatToFloat16();
4005 }
4006 }
4007
4008 generate_libm_stubs();
4009
4010 if ((UseAVX >= 1) && (VM_Version::supports_avx512vlbwdq() || VM_Version::supports_fma())) {
4011 StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
4012 }
4013 }
4014
4015 // Call here from the interpreter or compiled code to either load
4016 // multiple returned values from the inline type instance being
4017 // returned to registers or to store returned values to a newly
4018 // allocated inline type instance.
4019 // Register is a class, but it would be assigned numerical value.
4020 // "0" is assigned for xmm0. Thus we need to ignore -Wnonnull.
4021 PRAGMA_DIAG_PUSH
4022 PRAGMA_NONNULL_IGNORED
4023 address StubGenerator::generate_return_value_stub(address destination, const char* name, bool has_res) {
4024 // We need to save all registers the calling convention may use so
4025 // the runtime calls read or update those registers. This needs to
4026 // be in sync with SharedRuntime::java_return_convention().
4027 enum layout {
4028 pad_off = frame::arg_reg_save_area_bytes/BytesPerInt, pad_off_2,
4029 rax_off, rax_off_2,
4030 j_rarg5_off, j_rarg5_2,
4031 j_rarg4_off, j_rarg4_2,
4032 j_rarg3_off, j_rarg3_2,
4033 j_rarg2_off, j_rarg2_2,
4034 j_rarg1_off, j_rarg1_2,
4035 j_rarg0_off, j_rarg0_2,
4036 j_farg0_off, j_farg0_2,
4037 j_farg1_off, j_farg1_2,
4038 j_farg2_off, j_farg2_2,
4039 j_farg3_off, j_farg3_2,
4040 j_farg4_off, j_farg4_2,
4041 j_farg5_off, j_farg5_2,
4042 j_farg6_off, j_farg6_2,
4043 j_farg7_off, j_farg7_2,
4044 rbp_off, rbp_off_2,
4045 return_off, return_off_2,
4046
4047 framesize
4048 };
4049
4050 CodeBuffer buffer(name, 1000, 512);
4051 MacroAssembler* _masm = new MacroAssembler(&buffer);
4052
4053 int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
4054 assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
4055 int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
4056 int frame_size_in_words = frame_size_in_bytes / wordSize;
4057
4058 OopMapSet *oop_maps = new OopMapSet();
4059 OopMap* map = new OopMap(frame_size_in_slots, 0);
4060
4061 map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
4062 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
4063 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
4064 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
4065 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
4066 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
4067 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
4068 map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
4069 map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
4070 map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
4071 map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
4072 map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
4073 map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
4074 map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
4075 map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
4076
4077 int start = __ offset();
4078
4079 __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
4080
4081 __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
4082 __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
4083 __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
4084 __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
4085 __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
4086 __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
4087 __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
4088 __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
4089 __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
4090
4091 __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
4092 __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
4093 __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
4094 __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
4095 __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
4096 __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
4097 __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
4098
4099 int frame_complete = __ offset();
4100
4101 __ set_last_Java_frame(noreg, noreg, nullptr, rscratch1);
4102
4103 __ mov(c_rarg0, r15_thread);
4104 __ mov(c_rarg1, rax);
4105
4106 __ call(RuntimeAddress(destination));
4107
4108 // Set an oopmap for the call site.
4109
4110 oop_maps->add_gc_map( __ offset() - start, map);
4111
4112 // clear last_Java_sp
4113 __ reset_last_Java_frame(false);
4114
4115 __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
4116 __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
4117 __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
4118 __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
4119 __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
4120 __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
4121 __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
4122 __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
4123 __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
4124
4125 __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
4126 __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
4127 __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
4128 __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
4129 __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
4130 __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
4131 __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
4132
4133 __ addptr(rsp, frame_size_in_bytes-8);
4134
4135 // check for pending exceptions
4136 Label pending;
4137 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
4138 __ jcc(Assembler::notEqual, pending);
4139
4140 if (has_res) {
4141 __ get_vm_result(rax, r15_thread);
4142 }
4143
4144 __ ret(0);
4145
4146 __ bind(pending);
4147
4148 __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
4149 __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
4150
4151 // -------------
4152 // make sure all code is generated
4153 _masm->flush();
4154
4155 RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
4156 return stub->entry_point();
4157 }
4158
4159 void StubGenerator::generate_continuation_stubs() {
4160 // Continuation stubs:
4161 StubRoutines::_cont_thaw = generate_cont_thaw();
4162 StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
4163 StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
4164
4165 JFR_ONLY(generate_jfr_stubs();)
4166 }
4167
#if INCLUDE_JFR
// Build the two JFR stubs. Each is stored twice: the RuntimeStub*
// for code-cache bookkeeping and its entry point for direct calls.
void StubGenerator::generate_jfr_stubs() {
  StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();
  StubRoutines::_jfr_write_checkpoint      = StubRoutines::_jfr_write_checkpoint_stub->entry_point();
  StubRoutines::_jfr_return_lease_stub     = generate_jfr_return_lease();
  StubRoutines::_jfr_return_lease          = StubRoutines::_jfr_return_lease_stub->entry_point();
}
#endif // INCLUDE_JFR
4176
4177 void StubGenerator::generate_final_stubs() {
4178 // Generates the rest of stubs and initializes the entry points
|