6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.hpp"
27 #include "classfile/javaClasses.hpp"
28 #include "classfile/vmIntrinsics.hpp"
29 #include "compiler/oopMap.hpp"
30 #include "gc/shared/barrierSet.hpp"
31 #include "gc/shared/barrierSetAssembler.hpp"
32 #include "gc/shared/barrierSetNMethod.hpp"
33 #include "gc/shared/gc_globals.hpp"
34 #include "memory/universe.hpp"
35 #include "prims/jvmtiExport.hpp"
36 #include "prims/upcallLinker.hpp"
37 #include "runtime/arguments.hpp"
38 #include "runtime/javaThread.hpp"
39 #include "runtime/sharedRuntime.hpp"
40 #include "runtime/stubRoutines.hpp"
41 #include "stubGenerator_x86_64.hpp"
42 #ifdef COMPILER2
43 #include "opto/runtime.hpp"
44 #include "opto/c2_globals.hpp"
45 #endif
46 #if INCLUDE_JVMCI
47 #include "jvmci/jvmci_globals.hpp"
48 #endif
49
50 // For a more detailed description of the stub routine structure
51 // see the comment in stubRoutines.hpp
52
53 #define __ _masm->
54 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
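// TIMES_OOP is the address scale used when indexing oop arrays: compressed
// oops are 4 bytes wide, uncompressed oops are 8. Illustrative use (not a
// line from this file): Address(base, index, TIMES_OOP, 0) scales 'index'
// by the current oop size.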
55
56 #ifdef PRODUCT
57 #define BLOCK_COMMENT(str) /* nothing */
58 #else
59 #define BLOCK_COMMENT(str) __ block_comment(str)
60 #endif // PRODUCT
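// Argument-passing loop of the call stub: c_rarg2 walks the parameter array
// and c_rarg1 holds the remaining count; each parameter is loaded, pushed
// onto the stack, and the counter decremented until it reaches zero.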
286 __ BIND(loop);
287 __ movptr(rax, Address(c_rarg2, 0));// get parameter
288 __ addptr(c_rarg2, wordSize); // advance to next parameter
289 __ decrementl(c_rarg1); // decrement counter
290 __ push(rax); // pass parameter
291 __ jcc(Assembler::notZero, loop);
292
293 // call Java function
294 __ BIND(parameters_done);
295 __ movptr(rbx, method); // get Method*
296 __ movptr(c_rarg1, entry_point); // get entry_point
297 __ mov(r13, rsp); // set sender sp
298 BLOCK_COMMENT("call Java function");
299 __ call(c_rarg1);
300
301 BLOCK_COMMENT("call_stub_return_address:");
302 return_address = __ pc();
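// The pc captured here is published as StubRoutines::_call_stub_return_address
// (see generate_initial_stubs below), which the runtime uses to recognize
// returns into the call stub during stack walking.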
303
304 // store result depending on type (everything that is not
305 // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
306 __ movptr(c_rarg0, result);
307 Label is_long, is_float, is_double, exit;
308 __ movl(c_rarg1, result_type);
309 __ cmpl(c_rarg1, T_OBJECT);
310 __ jcc(Assembler::equal, is_long);
311 __ cmpl(c_rarg1, T_LONG);
312 __ jcc(Assembler::equal, is_long);
313 __ cmpl(c_rarg1, T_FLOAT);
314 __ jcc(Assembler::equal, is_float);
315 __ cmpl(c_rarg1, T_DOUBLE);
316 __ jcc(Assembler::equal, is_double);
317 #ifdef ASSERT
318 // make sure the type is INT
319 {
320 Label L;
321 __ cmpl(c_rarg1, T_INT);
322 __ jcc(Assembler::equal, L);
323 __ stop("StubRoutines::call_stub: unexpected result type");
324 __ bind(L);
325 }
326 #endif
327
328 // handle T_INT case
329 __ movl(Address(c_rarg0, 0), rax);
330
331 __ BIND(exit);
332
333 // pop parameters
334 __ lea(rsp, rsp_after_call);
335
336 #ifdef ASSERT
337 // verify that threads correspond
338 {
339 Label L1, L2, L3;
340 __ cmpptr(r15_thread, thread);
341 __ jcc(Assembler::equal, L1);
342 __ stop("StubRoutines::call_stub: r15_thread is corrupted");
343 __ bind(L1);
344 __ get_thread(rbx);
345 __ cmpptr(r15_thread, thread);
346 __ jcc(Assembler::equal, L2);
347 __ stop("StubRoutines::call_stub: r15_thread is modified by call");
348 __ bind(L2);
349 __ cmpptr(r15_thread, rbx);
367 __ movptr(r13, r13_save);
368 __ movptr(r12, r12_save);
369 __ movptr(rbx, rbx_save);
370
371 #ifdef _WIN64
372 __ movptr(rdi, rdi_save);
373 __ movptr(rsi, rsi_save);
374 #else
375 __ ldmxcsr(mxcsr_save);
376 #endif
377
378 // restore rsp
379 __ addptr(rsp, -rsp_after_call_off * wordSize);
380
381 // return
382 __ vzeroupper();
383 __ pop(rbp);
384 __ ret(0);
385
386 // handle return types different from T_INT
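// Per the Java calling convention, long and oop results arrive in rax and
// float/double results in xmm0; they are stored through the result pointer
// held in c_rarg0.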
387 __ BIND(is_long);
388 __ movq(Address(c_rarg0, 0), rax);
389 __ jmp(exit);
390
391 __ BIND(is_float);
392 __ movflt(Address(c_rarg0, 0), xmm0);
393 __ jmp(exit);
394
395 __ BIND(is_double);
396 __ movdbl(Address(c_rarg0, 0), xmm0);
397 __ jmp(exit);
398
399 return start;
400 }
401
402 // Return point for a Java call if there's an exception thrown in
403 // Java code. The exception is caught and transformed into a
404 // pending exception stored in JavaThread that can be tested from
405 // within the VM.
406 //
407 // Note: Usually the parameters are removed by the callee. In case
408 // of an exception crossing an activation frame boundary, that is
409 // not the case if the callee is compiled code => we need to set up
410 // rsp.
411 //
412 // rax: exception oop
413
414 address StubGenerator::generate_catch_exception() {
415 StubCodeMark mark(this, "StubRoutines", "catch_exception");
416 address start = __ pc();
3877 // Initialization
3878 void StubGenerator::generate_initial_stubs() {
3879 // Generates the initial stubs and initializes the entry points
3880
3881 // These platform-specific settings are needed by generate_call_stub()
3882 create_control_words();
3883
3884 // Initialize table for unsafe copy memory check.
3885 if (UnsafeMemoryAccess::_table == nullptr) {
3886 UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3887 }
3888
3889 // entry points that exist on all platforms. Note: This is code
3890 // that could be shared among different platforms - however, the
3891 // benefit seems to be smaller than the disadvantage of having a
3892 // much more complicated generator structure. See also the comment
3893 // in stubRoutines.hpp.
3894
3895 StubRoutines::_forward_exception_entry = generate_forward_exception();
3896
3897 StubRoutines::_call_stub_entry =
3898 generate_call_stub(StubRoutines::_call_stub_return_address);
3899
3900 // is referenced by megamorphic call
3901 StubRoutines::_catch_exception_entry = generate_catch_exception();
3902
3903 // atomic calls
3904 StubRoutines::_fence_entry = generate_orderaccess_fence();
3905
3906 // platform dependent
3907 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3908
3909 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
3910
3911 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
3912 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
3913 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
3914 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
3915
3916 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
3929 StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3930 StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3931 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3932 }
3933
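// Float.float16ToFloat / Float.floatToFloat16 intrinsic stubs: convert
// between IEEE 754 binary16 and binary32 values.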
3934 if (VM_Version::supports_float16()) {
3935 // For consistency of results, both intrinsics should be enabled.
3936 // vmIntrinsics checks the InlineIntrinsics flag, so there is no need to check it here.
3937 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3938 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3939 StubRoutines::_hf2f = generate_float16ToFloat();
3940 StubRoutines::_f2hf = generate_floatToFloat16();
3941 }
3942 }
3943
3944 generate_libm_stubs();
3945
3946 StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3947 }
3948
3949 void StubGenerator::generate_continuation_stubs() {
3950 // Continuation stubs:
3951 StubRoutines::_cont_thaw = generate_cont_thaw();
3952 StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
3953 StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
3954 }
3955
3956 void StubGenerator::generate_final_stubs() {
3957 // Generates the rest of the stubs and initializes the entry points
3958
3959 // support for verify_oop (must happen after universe_init)
3960 if (VerifyOops) {
3961 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
3962 }
3963
3964 // data cache line writeback
3965 StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
3966 StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
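// These writeback stubs back Unsafe.writebackMemory (used, for example, by
// MappedByteBuffer::force), flushing cache lines with CLWB or CLFLUSH
// variants as supported by the CPU.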
3967
3968 // arraycopy stubs used by compilers
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/assembler.hpp"
27 #include "asm/macroAssembler.hpp"
28 #include "classfile/javaClasses.hpp"
29 #include "classfile/vmIntrinsics.hpp"
30 #include "compiler/oopMap.hpp"
31 #include "gc/shared/barrierSet.hpp"
32 #include "gc/shared/barrierSetAssembler.hpp"
33 #include "gc/shared/barrierSetNMethod.hpp"
34 #include "gc/shared/gc_globals.hpp"
35 #include "memory/universe.hpp"
36 #include "prims/jvmtiExport.hpp"
37 #include "prims/upcallLinker.hpp"
38 #include "runtime/arguments.hpp"
39 #include "runtime/javaThread.hpp"
40 #include "runtime/sharedRuntime.hpp"
41 #include "runtime/stubRoutines.hpp"
42 #include "utilities/macros.hpp"
43 #include "vmreg_x86.inline.hpp"
44 #include "stubGenerator_x86_64.hpp"
45 #ifdef COMPILER2
46 #include "opto/runtime.hpp"
47 #include "opto/c2_globals.hpp"
48 #endif
49 #if INCLUDE_JVMCI
50 #include "jvmci/jvmci_globals.hpp"
51 #endif
52
53 // For a more detailed description of the stub routine structure
54 // see the comment in stubRoutines.hpp
55
56 #define __ _masm->
57 #define TIMES_OOP (UseCompressedOops ? Address::times_4 : Address::times_8)
58
59 #ifdef PRODUCT
60 #define BLOCK_COMMENT(str) /* nothing */
61 #else
62 #define BLOCK_COMMENT(str) __ block_comment(str)
63 #endif // PRODUCT
289 __ BIND(loop);
290 __ movptr(rax, Address(c_rarg2, 0));// get parameter
291 __ addptr(c_rarg2, wordSize); // advance to next parameter
292 __ decrementl(c_rarg1); // decrement counter
293 __ push(rax); // pass parameter
294 __ jcc(Assembler::notZero, loop);
295
296 // call Java function
297 __ BIND(parameters_done);
298 __ movptr(rbx, method); // get Method*
299 __ movptr(c_rarg1, entry_point); // get entry_point
300 __ mov(r13, rsp); // set sender sp
301 BLOCK_COMMENT("call Java function");
302 __ call(c_rarg1);
303
304 BLOCK_COMMENT("call_stub_return_address:");
305 return_address = __ pc();
306
307 // store result depending on type (everything that is not
308 // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
309 __ movptr(r13, result);
310 Label is_long, is_float, is_double, check_prim, exit;
311 __ movl(rbx, result_type);
312 __ cmpl(rbx, T_OBJECT);
313 __ jcc(Assembler::equal, check_prim);
314 __ cmpl(rbx, T_LONG);
315 __ jcc(Assembler::equal, is_long);
316 __ cmpl(rbx, T_FLOAT);
317 __ jcc(Assembler::equal, is_float);
318 __ cmpl(rbx, T_DOUBLE);
319 __ jcc(Assembler::equal, is_double);
320 #ifdef ASSERT
321 // make sure the type is INT
322 {
323 Label L;
324 __ cmpl(rbx, T_INT);
325 __ jcc(Assembler::equal, L);
326 __ stop("StubRoutines::call_stub: unexpected result type");
327 __ bind(L);
328 }
329 #endif
330
331 // handle T_INT case
332 __ movl(Address(r13, 0), rax);
333
334 __ BIND(exit);
335
336 // pop parameters
337 __ lea(rsp, rsp_after_call);
338
339 #ifdef ASSERT
340 // verify that threads correspond
341 {
342 Label L1, L2, L3;
343 __ cmpptr(r15_thread, thread);
344 __ jcc(Assembler::equal, L1);
345 __ stop("StubRoutines::call_stub: r15_thread is corrupted");
346 __ bind(L1);
347 __ get_thread(rbx);
348 __ cmpptr(r15_thread, thread);
349 __ jcc(Assembler::equal, L2);
350 __ stop("StubRoutines::call_stub: r15_thread is modified by call");
351 __ bind(L2);
352 __ cmpptr(r15_thread, rbx);
370 __ movptr(r13, r13_save);
371 __ movptr(r12, r12_save);
372 __ movptr(rbx, rbx_save);
373
374 #ifdef _WIN64
375 __ movptr(rdi, rdi_save);
376 __ movptr(rsi, rsi_save);
377 #else
378 __ ldmxcsr(mxcsr_save);
379 #endif
380
381 // restore rsp
382 __ addptr(rsp, -rsp_after_call_off * wordSize);
383
384 // return
385 __ vzeroupper();
386 __ pop(rbp);
387 __ ret(0);
388
389 // handle return types different from T_INT
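// For a T_OBJECT result, compiled code may have returned an inline type in
// scalarized form: in that case rax carries an InlineKlass pointer tagged in
// its low bit rather than an oop, and the klass' pack handler is called to
// buffer the field values before the result is stored.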
390 __ BIND(check_prim);
391 if (InlineTypeReturnedAsFields) {
392 // Check for scalarized return value
393 __ testptr(rax, 1);
394 __ jcc(Assembler::zero, is_long);
395 // Load pack handler address
396 __ andptr(rax, -2);
397 __ movptr(rax, Address(rax, InstanceKlass::adr_inlineklass_fixed_block_offset()));
398 __ movptr(rbx, Address(rax, InlineKlass::pack_handler_jobject_offset()));
399 // Call pack handler to initialize the buffer
400 __ call(rbx);
401 __ jmp(exit);
402 }
403 __ BIND(is_long);
404 __ movq(Address(r13, 0), rax);
405 __ jmp(exit);
406
407 __ BIND(is_float);
408 __ movflt(Address(r13, 0), xmm0);
409 __ jmp(exit);
410
411 __ BIND(is_double);
412 __ movdbl(Address(r13, 0), xmm0);
413 __ jmp(exit);
414
415 return start;
416 }
417
418 // Return point for a Java call if there's an exception thrown in
419 // Java code. The exception is caught and transformed into a
420 // pending exception stored in JavaThread that can be tested from
421 // within the VM.
422 //
423 // Note: Usually the parameters are removed by the callee. In case
424 // of an exception crossing an activation frame boundary, that is
425 // not the case if the callee is compiled code => we need to set up
426 // rsp.
427 //
428 // rax: exception oop
429
430 address StubGenerator::generate_catch_exception() {
431 StubCodeMark mark(this, "StubRoutines", "catch_exception");
432 address start = __ pc();
3893 // Initialization
3894 void StubGenerator::generate_initial_stubs() {
3895 // Generates all stubs and initializes the entry points
3896
3897 // These platform-specific settings are needed by generate_call_stub()
3898 create_control_words();
3899
3900 // Initialize table for unsafe copy memory check.
3901 if (UnsafeMemoryAccess::_table == nullptr) {
3902 UnsafeMemoryAccess::create_table(16 + 4); // 16 for copyMemory; 4 for setMemory
3903 }
3904
3905 // entry points that exist on all platforms. Note: This is code
3906 // that could be shared among different platforms - however, the
3907 // benefit seems to be smaller than the disadvantage of having a
3908 // much more complicated generator structure. See also the comment
3909 // in stubRoutines.hpp.
3910
3911 StubRoutines::_forward_exception_entry = generate_forward_exception();
3912
3913 // Generate these first because they are called from other stubs
3914 if (InlineTypeReturnedAsFields) {
3915 StubRoutines::_load_inline_type_fields_in_regs =
3916 generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_inline_type_fields_in_regs),
3917 "load_inline_type_fields_in_regs", false);
3918 StubRoutines::_store_inline_type_fields_to_buf =
3919 generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_inline_type_fields_to_buf),
3920 "store_inline_type_fields_to_buf", true);
3921 }
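// Both entries are produced by generate_return_value_stub() below and are
// used when inline types are returned as fields rather than as a buffered
// oop.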
3922
3923 StubRoutines::_call_stub_entry =
3924 generate_call_stub(StubRoutines::_call_stub_return_address);
3925
3926 // is referenced by megamorphic call
3927 StubRoutines::_catch_exception_entry = generate_catch_exception();
3928
3929 // atomic calls
3930 StubRoutines::_fence_entry = generate_orderaccess_fence();
3931
3932 // platform dependent
3933 StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
3934
3935 StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
3936
3937 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
3938 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
3939 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
3940 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
3941
3942 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
3955 StubRoutines::x86::generate_CRC32C_table(supports_clmul);
3956 StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
3957 StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
3958 }
3959
3960 if (VM_Version::supports_float16()) {
3961 // For consistency of results, both intrinsics should be enabled.
3962 // vmIntrinsics checks the InlineIntrinsics flag, so there is no need to check it here.
3963 if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_float16ToFloat) &&
3964 vmIntrinsics::is_intrinsic_available(vmIntrinsics::_floatToFloat16)) {
3965 StubRoutines::_hf2f = generate_float16ToFloat();
3966 StubRoutines::_f2hf = generate_floatToFloat16();
3967 }
3968 }
3969
3970 generate_libm_stubs();
3971
3972 StubRoutines::_fmod = generate_libmFmod(); // from stubGenerator_x86_64_fmod.cpp
3973 }
3974
3975 // Call here from the interpreter or compiled code to either load
3976 // multiple returned values from the inline type instance being
3977 // returned to registers or to store returned values to a newly
3978 // allocated inline type instance.
3979 // Register is a class, but it is assigned a numerical value; xmm0 is
3980 // assigned "0", which triggers -Wnonnull, so we suppress that warning.
3981 PRAGMA_DIAG_PUSH
3982 PRAGMA_NONNULL_IGNORED
3983 address StubGenerator::generate_return_value_stub(address destination, const char* name, bool has_res) {
3984 // We need to save all registers that the calling convention may use
3985 // so that the runtime call can read or update them. This needs to
3986 // be kept in sync with SharedRuntime::java_return_convention().
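// The layout is expressed in 32-bit stack slots (BytesPerInt); each 64-bit
// register occupies two slots, hence the *_2 companion entries.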
3987 enum layout {
3988 pad_off = frame::arg_reg_save_area_bytes/BytesPerInt, pad_off_2,
3989 rax_off, rax_off_2,
3990 j_rarg5_off, j_rarg5_2,
3991 j_rarg4_off, j_rarg4_2,
3992 j_rarg3_off, j_rarg3_2,
3993 j_rarg2_off, j_rarg2_2,
3994 j_rarg1_off, j_rarg1_2,
3995 j_rarg0_off, j_rarg0_2,
3996 j_farg0_off, j_farg0_2,
3997 j_farg1_off, j_farg1_2,
3998 j_farg2_off, j_farg2_2,
3999 j_farg3_off, j_farg3_2,
4000 j_farg4_off, j_farg4_2,
4001 j_farg5_off, j_farg5_2,
4002 j_farg6_off, j_farg6_2,
4003 j_farg7_off, j_farg7_2,
4004 rbp_off, rbp_off_2,
4005 return_off, return_off_2,
4006
4007 framesize
4008 };
4009
4010 CodeBuffer buffer(name, 1000, 512);
4011 MacroAssembler* _masm = new MacroAssembler(&buffer);
4012
4013 int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
4014 assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
4015 int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
4016 int frame_size_in_words = frame_size_in_bytes / wordSize;
4017
4018 OopMapSet *oop_maps = new OopMapSet();
4019 OopMap* map = new OopMap(frame_size_in_slots, 0);
4020
4021 map->set_callee_saved(VMRegImpl::stack2reg(rax_off), rax->as_VMReg());
4022 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
4023 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
4024 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
4025 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
4026 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
4027 map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
4028 map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
4029 map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
4030 map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
4031 map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
4032 map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
4033 map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
4034 map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
4035 map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
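// Recording the save locations in the OopMap lets the GC find and update any
// oops held in these registers while the runtime call below is in progress.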
4036
4037 int start = __ offset();
4038
4039 __ subptr(rsp, frame_size_in_bytes - 8 /* return address*/);
4040
4041 __ movptr(Address(rsp, rbp_off * BytesPerInt), rbp);
4042 __ movdbl(Address(rsp, j_farg7_off * BytesPerInt), j_farg7);
4043 __ movdbl(Address(rsp, j_farg6_off * BytesPerInt), j_farg6);
4044 __ movdbl(Address(rsp, j_farg5_off * BytesPerInt), j_farg5);
4045 __ movdbl(Address(rsp, j_farg4_off * BytesPerInt), j_farg4);
4046 __ movdbl(Address(rsp, j_farg3_off * BytesPerInt), j_farg3);
4047 __ movdbl(Address(rsp, j_farg2_off * BytesPerInt), j_farg2);
4048 __ movdbl(Address(rsp, j_farg1_off * BytesPerInt), j_farg1);
4049 __ movdbl(Address(rsp, j_farg0_off * BytesPerInt), j_farg0);
4050
4051 __ movptr(Address(rsp, j_rarg0_off * BytesPerInt), j_rarg0);
4052 __ movptr(Address(rsp, j_rarg1_off * BytesPerInt), j_rarg1);
4053 __ movptr(Address(rsp, j_rarg2_off * BytesPerInt), j_rarg2);
4054 __ movptr(Address(rsp, j_rarg3_off * BytesPerInt), j_rarg3);
4055 __ movptr(Address(rsp, j_rarg4_off * BytesPerInt), j_rarg4);
4056 __ movptr(Address(rsp, j_rarg5_off * BytesPerInt), j_rarg5);
4057 __ movptr(Address(rsp, rax_off * BytesPerInt), rax);
4058
4059 int frame_complete = __ offset();
4060
4061 __ set_last_Java_frame(noreg, noreg, nullptr, rscratch1);
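// Publish a last-Java-frame anchor so the stack can be walked while we are
// in the runtime call.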
4062
4063 __ mov(c_rarg0, r15_thread);
4064 __ mov(c_rarg1, rax);
4065
4066 __ call(RuntimeAddress(destination));
4067
4068 // Set an oopmap for the call site.
4069
4070 oop_maps->add_gc_map( __ offset() - start, map);
4071
4072 // clear last_Java_sp
4073 __ reset_last_Java_frame(false);
4074
4075 __ movptr(rbp, Address(rsp, rbp_off * BytesPerInt));
4076 __ movdbl(j_farg7, Address(rsp, j_farg7_off * BytesPerInt));
4077 __ movdbl(j_farg6, Address(rsp, j_farg6_off * BytesPerInt));
4078 __ movdbl(j_farg5, Address(rsp, j_farg5_off * BytesPerInt));
4079 __ movdbl(j_farg4, Address(rsp, j_farg4_off * BytesPerInt));
4080 __ movdbl(j_farg3, Address(rsp, j_farg3_off * BytesPerInt));
4081 __ movdbl(j_farg2, Address(rsp, j_farg2_off * BytesPerInt));
4082 __ movdbl(j_farg1, Address(rsp, j_farg1_off * BytesPerInt));
4083 __ movdbl(j_farg0, Address(rsp, j_farg0_off * BytesPerInt));
4084
4085 __ movptr(j_rarg0, Address(rsp, j_rarg0_off * BytesPerInt));
4086 __ movptr(j_rarg1, Address(rsp, j_rarg1_off * BytesPerInt));
4087 __ movptr(j_rarg2, Address(rsp, j_rarg2_off * BytesPerInt));
4088 __ movptr(j_rarg3, Address(rsp, j_rarg3_off * BytesPerInt));
4089 __ movptr(j_rarg4, Address(rsp, j_rarg4_off * BytesPerInt));
4090 __ movptr(j_rarg5, Address(rsp, j_rarg5_off * BytesPerInt));
4091 __ movptr(rax, Address(rsp, rax_off * BytesPerInt));
4092
4093 __ addptr(rsp, frame_size_in_bytes-8);
4094
4095 // check for pending exceptions
4096 Label pending;
4097 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
4098 __ jcc(Assembler::notEqual, pending);
4099
4100 if (has_res) {
4101 __ get_vm_result(rax, r15_thread);
4102 }
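// get_vm_result loads the oop left in the thread's vm_result field by the
// runtime call and clears the field; the buffered result ends up in rax.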
4103
4104 __ ret(0);
4105
4106 __ bind(pending);
4107
4108 __ movptr(rax, Address(r15_thread, Thread::pending_exception_offset()));
4109 __ jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
4110
4111 // -------------
4112 // make sure all code is generated
4113 _masm->flush();
4114
4115 RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, false);
4116 return stub->entry_point();
4117 }
4118
4119 void StubGenerator::generate_continuation_stubs() {
4120 // Continuation stubs:
4121 StubRoutines::_cont_thaw = generate_cont_thaw();
4122 StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
4123 StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
4124 }
4125
4126 void StubGenerator::generate_final_stubs() {
4127 // Generates the rest of the stubs and initializes the entry points
4128
4129 // support for verify_oop (must happen after universe_init)
4130 if (VerifyOops) {
4131 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
4132 }
4133
4134 // data cache line writeback
4135 StubRoutines::_data_cache_writeback = generate_data_cache_writeback();
4136 StubRoutines::_data_cache_writeback_sync = generate_data_cache_writeback_sync();
4137
4138 // arraycopy stubs used by compilers