  for (int p = 1; p <= n_shadow_pages; p++) {
    __ bang_stack_with_offset(p*page_size);
  }

  // Record the new watermark, but only if the update is above the safe limit.
  // Otherwise, the next time around, the check above would pass the safe limit.
  __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
  __ jccb(Assembler::belowEqual, L_done);
  __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);

  __ bind(L_done);

#ifndef _LP64
  __ pop(thread);
#endif
}
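
// Illustrative sketch, not part of the generated stub: the bang-and-watermark
// logic above is roughly equivalent to this C-like pseudocode (the accessor
// names are assumptions for illustration only):
//
//   for (int p = 1; p <= n_shadow_pages; p++) {
//     *(volatile char*)(sp - p * page_size) = 0;     // touch each shadow page
//   }
//   if (sp > thread->shadow_zone_safe_limit()) {     // stack grows downward
//     thread->set_shadow_zone_growth_watermark(sp);  // remember how far we banged
//   }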

// Interpreter stub for calling a native method. (asm interpreter)
// This sets up a somewhat different looking stack for calling the
// native method than the typical interpreter frame setup.
address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
  // determine code generation flags
  bool inc_counter = UseCompiler || CountCompiledCalls;

  // rbx: Method*
  // rbcp: sender sp

  address entry_point = __ pc();

  const Address constMethod       (rbx, Method::const_offset());
  const Address access_flags      (rbx, Method::access_flags_offset());
  const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());


  // get parameter size (always needed)
  __ movptr(rcx, constMethod);
  __ load_unsigned_short(rcx, size_of_parameters);

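  // Illustrative sketch, not generated code: at the C++ level the two loads
  // above amount to
  //
  //   ConstMethod* cm = method->constMethod();   // movptr(rcx, constMethod)
  //   int params = cm->size_of_parameters();     // load_unsigned_short(rcx, ...)
  //
  // i.e. rcx first holds the ConstMethod* and is then overwritten with the
  // 16-bit parameter count.
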
  // Native calls don't need the stack size check since they have no
  // expression stack, the arguments are already on the stack, and
  // we only add a handful of words to the stack.

  // ... (native frame setup elided) ...
  }

  // start execution
#ifdef ASSERT
  {
    Label L;
    const Address monitor_block_top(rbp,
        frame::interpreter_frame_monitor_block_top_offset * wordSize);
    __ movptr(rax, monitor_block_top);
    __ lea(rax, Address(rbp, rax, Address::times_ptr));
    __ cmpptr(rax, rsp);
    __ jcc(Assembler::equal, L);
    __ stop("broken stack frame setup in interpreter 5");
    __ bind(L);
  }
#endif
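
  // Illustrative sketch of the invariant the assert above checks:
  // monitor_block_top holds a word offset relative to rbp, so
  //
  //   intptr_t* monitor_top = (intptr_t*)rbp
  //       + ((intptr_t*)rbp)[frame::interpreter_frame_monitor_block_top_offset];
  //   assert(monitor_top == (intptr_t*)rsp,
  //          "no monitors and no expression stack entries yet");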

  // jvmti support
  __ notify_method_entry();

  // work registers
  const Register method = rbx;
  const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread);
  const Register t      = NOT_LP64(rcx) LP64_ONLY(r11);

  // allocate space for parameters
  __ get_method(method);
  __ movptr(t, Address(method, Method::const_offset()));
  __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));

#ifndef _LP64
  __ shlptr(t, Interpreter::logStackElementSize); // convert parameter count to bytes
  __ addptr(t, 2*wordSize);  // allocate two more slots for JNIEnv and possible mirror
  __ subptr(rsp, t);
  __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
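  // Illustrative example with made-up values: StackAlignmentInBytes == 16,
  // so -(StackAlignmentInBytes) == 0xFFFFFFF0 and
  //   rsp = 0xBFFF1238  ->  rsp & 0xFFFFFFF0 = 0xBFFF1230
  // rsp is rounded *down*, which is safe because the stack grows toward
  // lower addresses.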
#else
  __ shll(t, Interpreter::logStackElementSize);

  __ subptr(rsp, t);
  __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
  // ... (remainder of the native entry elided; the code below is the
  // abstract method entry) ...

  address entry_point = __ pc();

  // abstract method entry

  // pop return address, reset last_sp to null
  __ empty_expression_stack();
  __ restore_bcp();    // rsi must be correct for exception handler (was destroyed)
  __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)

  // throw exception
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
  // the call_VM checks for exception, so we should never return here
  __ should_not_reach_here();

  return entry_point;
}

//
// Generic interpreted method entry to (asm) interpreter
//
address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
  // determine code generation flags
  bool inc_counter = UseCompiler || CountCompiledCalls;

  // rbx: Method*
  // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
  address entry_point = __ pc();

  const Address constMethod(rbx, Method::const_offset());
  const Address access_flags(rbx, Method::access_flags_offset());
  const Address size_of_parameters(rdx, ConstMethod::size_of_parameters_offset());
  const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());


  // get parameter size (always needed)
  __ movptr(rdx, constMethod);
  __ load_unsigned_short(rcx, size_of_parameters);

  // rbx: Method*
  // rcx: size of parameters
  // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i)

  // ... (locals and frame setup elided) ...
  }

  // start execution
#ifdef ASSERT
  {
    Label L;
    const Address monitor_block_top(rbp,
        frame::interpreter_frame_monitor_block_top_offset * wordSize);
    __ movptr(rax, monitor_block_top);
    __ lea(rax, Address(rbp, rax, Address::times_ptr));
    __ cmpptr(rax, rsp);
    __ jcc(Assembler::equal, L);
    __ stop("broken stack frame setup in interpreter 6");
    __ bind(L);
  }
#endif

  // jvmti support
  __ notify_method_entry();

  __ dispatch_next(vtos);

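  // Illustrative sketch, not generated code: dispatch_next(vtos) emits roughly
  //
  //   load the next bytecode:  movzbl rbx, byte [rbcp]
  //   jump to its handler:     jmp [Interpreter::dispatch_table(vtos) + rbx * wordSize]
  //
  // so control falls straight into the template for the next bytecode.
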
  // invocation counter overflow
  if (inc_counter) {
    // Handle overflow of counter and compile method
    __ bind(invocation_counter_overflow);
    generate_counter_overflow(continue_after_compile);
  }

  return entry_point;
}

//-----------------------------------------------------------------------------
// Exceptions

void TemplateInterpreterGenerator::generate_throw_exception() {
  // Entry point in previous activation (i.e., if the caller was
  // interpreted)
  Interpreter::_rethrow_exception_entry = __ pc();
  // Restore sp to interpreter_frame_last_sp even though we are going
  // to empty the expression stack for the exception processing.
  // ... (exception handling elided; the code below is from the tos-state
  // entry point setup) ...

#else
  fep = __ pc();  // ftos entry point
  __ push_f(xmm0);
  __ jmpb(L);
  dep = __ pc();  // dtos entry point
  __ push_d(xmm0);
  __ jmpb(L);
#endif // _LP64
  lep = __ pc();  // ltos entry point
  __ push_l();
  __ jmpb(L);
  aep = bep = cep = sep = iep = __ pc();  // [abcsi]tos entry point
  __ push_i_or_ptr();
  vep = __ pc();  // vtos entry point
  __ bind(L);
  generate_and_dispatch(t);
}
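
// Illustrative summary, not generated code: the labels set above form the
// per-TosState entry table. Each non-vtos entry spills the cached tos value
// to the expression stack and falls through to the common vtos path:
//
//   ftos, dtos                -> push xmm0 (64-bit)
//   ltos                      -> push_l()
//   atos/btos/ctos/stos/itos  -> push_i_or_ptr() (int-sized values and oops share rax)
//   vtos                      -> nothing cached, dispatch directly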

//-----------------------------------------------------------------------------

// Non-product code
#ifndef PRODUCT

address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
  address entry = __ pc();

#ifndef _LP64
  // prepare expression stack
  __ pop(rcx);    // pop return address so expression stack is 'pure'
  __ push(state); // save tosca

  // pass tosca registers as arguments & call tracer
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode), rcx, rax, rdx);
  __ mov(rcx, rax); // make sure return address is not destroyed by pop(state)
  __ pop(state);    // restore tosca

  // return
  __ jmp(rcx);
#else
  __ push(state);
  __ push(c_rarg0);
  __ push(c_rarg1);
  __ push(c_rarg2);
  __ push(c_rarg3);
  __ mov(c_rarg2, rax); // Pass itos
#ifdef _WIN64
  __ movflt(xmm3, xmm0); // Pass ftos
#endif
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
             c_rarg1, c_rarg2, c_rarg3);
  __ pop(c_rarg3);
  __ pop(c_rarg2);
  __ pop(c_rarg1);
  __ pop(c_rarg0);
  __ pop(state);
  __ ret(0); // return from result handler
#endif // _LP64

  return entry;
}

void TemplateInterpreterGenerator::count_bytecode() {
  __ incrementl(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
}

void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
  __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
}

void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
  __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));
  __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);
  __ orl(rbx, ((int) t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
  __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1);
  __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters));
  __ incrementl(Address(rscratch1, rbx, Address::times_4));
}
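
// Illustrative walk-through with made-up values: assuming
// log2_number_of_codes == 8, _index always holds the pair formed on the
// previous dispatch, with the newer bytecode in the high bits:
//
//   rbx = _index >> 8;      // recover the previous bytecode
//   rbx |= current << 8;    // form the pair (previous, current)
//   _index = rbx;           // remember it for the next dispatch
//   _counters[rbx]++;       // count this transition
//
// so the pair (b1, b2) is counted in slot b1 | (b2 << 8).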

void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
  // Call a little run-time stub to avoid blow-up for each bytecode.
  // The run-time stub saves the right registers, depending on
  // the tosca in-state for the given template.

  assert(Interpreter::trace_code(t->tos_in()) != nullptr,
         "entry must have been generated");
#ifndef _LP64
  // ... (remainder truncated) ...