  for (int p = 1; p <= n_shadow_pages; p++) {
    __ bang_stack_with_offset(p*page_size);
  }

  // Record the new watermark, but only if the update is above the safe limit.
  // Otherwise, the next time around the check above would pass the safe limit.
  __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
  __ jccb(Assembler::belowEqual, L_done);
  __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);

  __ bind(L_done);

#ifndef _LP64
  __ pop(thread);
#endif
}

// Interpreter stub for calling a native method. (asm interpreter)
// This sets up a somewhat different looking stack for calling the
// native method than the typical interpreter frame setup.
address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
  // determine code generation flags
  bool inc_counter = UseCompiler || CountCompiledCalls;

  // rbx: Method*
  // rbcp: sender sp

  address entry_point = __ pc();

  const Address constMethod       (rbx, Method::const_offset());
  const Address access_flags      (rbx, Method::access_flags_offset());
  const Address size_of_parameters(rcx, ConstMethod::size_of_parameters_offset());

  // get parameter size (always needed)
  __ movptr(rcx, constMethod);
  __ load_unsigned_short(rcx, size_of_parameters);
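  // rcx now holds the parameter count in words (a u2 field of ConstMethod).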

  // native calls don't need the stack size check since they have no
  // expression stack and the arguments are already on the stack and
  // we only add a handful of words to the stack

  // ...
  }

  // start execution
#ifdef ASSERT
  {
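    // At method entry no monitors have been allocated yet, so the stored
    // monitor block top (an offset relative to rbp) must resolve to rsp.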
    Label L;
    const Address monitor_block_top(rbp,
                                    frame::interpreter_frame_monitor_block_top_offset * wordSize);
    __ movptr(rax, monitor_block_top);
    __ lea(rax, Address(rbp, rax, Address::times_ptr));
    __ cmpptr(rax, rsp);
    __ jcc(Assembler::equal, L);
    __ stop("broken stack frame setup in interpreter 5");
    __ bind(L);
  }
#endif

  // jvmti support
  __ notify_method_entry();

  // runtime upcalls
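  // (invokes any upcalls the runtime has registered for method entry, only
  // when requested by the caller)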
  if (runtime_upcalls) {
    __ generate_runtime_upcalls_on_method_entry();
  }

  // work registers
  const Register method = rbx;
  const Register thread = NOT_LP64(rdi) LP64_ONLY(r15_thread);
  const Register t      = NOT_LP64(rcx) LP64_ONLY(r11);

  // allocate space for parameters
  __ get_method(method);
  __ movptr(t, Address(method, Method::const_offset()));
  __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
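  // t = parameter count in words; converted to bytes below when carving out
  // space on the C stack.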

#ifndef _LP64
  __ shlptr(t, Interpreter::logStackElementSize); // Convert parameter count to bytes.
  __ addptr(t, 2*wordSize); // allocate two more slots for JNIEnv and possible mirror
  __ subptr(rsp, t);
  __ andptr(rsp, -(StackAlignmentInBytes)); // gcc needs 16 byte aligned stacks to do XMM intrinsics
#else
  __ shll(t, Interpreter::logStackElementSize);

  __ subptr(rsp, t);
  __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
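  // On Win64, frame::arg_reg_save_area_bytes is the 32-byte home area the ABI
  // requires callers to reserve for register arguments; it is zero elsewhere.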

// ...

address TemplateInterpreterGenerator::generate_abstract_entry(void) {
  address entry_point = __ pc();

  // abstract method entry

  // pop return address, reset last_sp to null
  __ empty_expression_stack();
  __ restore_bcp();    // rsi must be correct for exception handler (was destroyed)
  __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)

  // throw exception
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
  // the call_VM checks for exception, so we should never return here.
  __ should_not_reach_here();

  return entry_point;
}

//
// Generic interpreted method entry to (asm) interpreter
//
address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
  // determine code generation flags
  bool inc_counter = UseCompiler || CountCompiledCalls;

  // rbx: Method*
  // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
  address entry_point = __ pc();

  const Address constMethod(rbx, Method::const_offset());
  const Address access_flags(rbx, Method::access_flags_offset());
  const Address size_of_parameters(rdx, ConstMethod::size_of_parameters_offset());
  const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
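  // size_of_locals counts the parameters too; the extra (non-parameter) locals
  // are the difference between the two fields.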


  // get parameter size (always needed)
  __ movptr(rdx, constMethod);
  __ load_unsigned_short(rcx, size_of_parameters);

  // rbx: Method*
  // rcx: size of parameters

  // ...
  }

  // start execution
#ifdef ASSERT
  {
    Label L;
    const Address monitor_block_top(rbp,
                                    frame::interpreter_frame_monitor_block_top_offset * wordSize);
    __ movptr(rax, monitor_block_top);
    __ lea(rax, Address(rbp, rax, Address::times_ptr));
    __ cmpptr(rax, rsp);
    __ jcc(Assembler::equal, L);
    __ stop("broken stack frame setup in interpreter 6");
    __ bind(L);
  }
#endif

  // jvmti support
  __ notify_method_entry();

  if (runtime_upcalls) {
    __ generate_runtime_upcalls_on_method_entry();
  }

  __ dispatch_next(vtos);
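  // Control does not fall through: dispatch_next fetches the first bytecode of
  // the method and jumps to its handler in the vtos dispatch table.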

  // invocation counter overflow
  if (inc_counter) {
    // Handle overflow of counter and compile method
    __ bind(invocation_counter_overflow);
    generate_counter_overflow(continue_after_compile);
  }

  return entry_point;
}

//-----------------------------------------------------------------------------
// Exceptions

void TemplateInterpreterGenerator::generate_throw_exception() {
  // Entry point in previous activation (i.e., if the caller was
  // interpreted)
  Interpreter::_rethrow_exception_entry = __ pc();
  // Restore sp to interpreter_frame_last_sp even though we are going
  // to empty the expression stack for the exception processing.

  // ...
#else
  fep = __ pc();  // ftos entry point
  __ push_f(xmm0);
  __ jmpb(L);
  dep = __ pc();  // dtos entry point
  __ push_d(xmm0);
  __ jmpb(L);
#endif // _LP64
  lep = __ pc();  // ltos entry point
  __ push_l();
  __ jmpb(L);
  aep = bep = cep = sep = iep = __ pc();  // [abcsi]tos entry point
  __ push_i_or_ptr();
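  // The object, byte, char, short and int tos states share one entry point:
  // each arrives in rax and occupies a single expression stack slot.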
  vep = __ pc();  // vtos entry point
  __ bind(L);
  generate_and_dispatch(t);
}

//-----------------------------------------------------------------------------

void TemplateInterpreterGenerator::count_bytecode() {
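  // The global bytecode counter is word-sized, so LP64 needs a full 64-bit
  // increment; the 32-bit path is not implemented.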
#ifdef _LP64
  __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
#else
  Unimplemented();
#endif
}

void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
  __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
}

// Non-product code
#ifndef PRODUCT

address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
  address entry = __ pc();
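  // The stub saves the tos registers, calls InterpreterRuntime::trace_bytecode,
  // then restores them and returns to the code that invoked the stub.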

#ifndef _LP64
  // prepare expression stack
  __ pop(rcx);    // pop return address so expression stack is 'pure'
  __ push(state); // save tosca

  // pass tosca registers as arguments & call tracer
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode), rcx, rax, rdx);
  __ mov(rcx, rax); // make sure return address is not destroyed by pop(state)
  __ pop(state);    // restore tosca

  // return
  __ jmp(rcx);
#else
  __ push(state);
  __ push(c_rarg0);
  __ push(c_rarg1);
  __ push(c_rarg2);
  __ push(c_rarg3);
  __ mov(c_rarg2, rax);  // Pass itos
#ifdef _WIN64
  __ movflt(xmm3, xmm0); // Pass ftos
#endif
  __ call_VM(noreg,
             CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
             c_rarg1, c_rarg2, c_rarg3);
  __ pop(c_rarg3);
  __ pop(c_rarg2);
  __ pop(c_rarg1);
  __ pop(c_rarg0);
  __ pop(state);
  __ ret(0);  // return from result handler
#endif // _LP64

  return entry;
}

void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
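  // _index caches the previous pair index. Shift out the older bytecode and
  // merge in the current one, giving (current << log2_number_of_codes) | previous.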
  __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));
  __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);
  __ orl(rbx, ((int) t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
  __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1);
  __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters));
  __ incrementl(Address(rscratch1, rbx, Address::times_4));
}


void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
  // Call a little run-time stub to avoid code blow-up for each bytecode.
  // The run-time stub saves the right registers, depending on
  // the tosca in-state for the given template.

  assert(Interpreter::trace_code(t->tos_in()) != nullptr,
         "entry must have been generated");
#ifndef _LP64