702
703 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_growth_watermark()));
704 __ jcc(Assembler::above, L_done);
705
706 for (int p = 1; p <= n_shadow_pages; p++) {
707 __ bang_stack_with_offset(p*page_size);
708 }
709
710 // Record the new watermark, but only if update is above the safe limit.
711 // Otherwise, the next time around the check above would pass the safe limit.
712 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
713 __ jccb(Assembler::belowEqual, L_done);
714 __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);
715
716 __ bind(L_done);
717 }
718
719 // Interpreter stub for calling a native method. (asm interpreter)
720 // This sets up a somewhat different looking stack for calling the
721 // native method than the typical interpreter frame setup.
722 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
723 // determine code generation flags
724 bool inc_counter = UseCompiler || CountCompiledCalls;
725
726 // rbx: Method*
727 // rbcp: sender sp
728
729 address entry_point = __ pc();
730
731 const Address constMethod (rbx, Method::const_offset());
732 const Address access_flags (rbx, Method::access_flags_offset());
733 const Address size_of_parameters(rcx, ConstMethod::
734 size_of_parameters_offset());
735
736
737 // get parameter size (always needed)
738 __ movptr(rcx, constMethod);
739 __ load_unsigned_short(rcx, size_of_parameters);
740
741 // native calls don't need the stack size check since they have no
742 // expression stack and the arguments are already on the stack and
743 // we only add a handful of words to the stack
744
825 }
826
827 // start execution
828 #ifdef ASSERT
829 {
830 Label L;
831 const Address monitor_block_top(rbp,
832 frame::interpreter_frame_monitor_block_top_offset * wordSize);
833 __ movptr(rax, monitor_block_top);
834 __ lea(rax, Address(rbp, rax, Address::times_ptr));
835 __ cmpptr(rax, rsp);
836 __ jcc(Assembler::equal, L);
837 __ stop("broken stack frame setup in interpreter 5");
838 __ bind(L);
839 }
840 #endif
841
842 // jvmti support
843 __ notify_method_entry();
844
845 // work registers
846 const Register method = rbx;
847 const Register thread = r15_thread;
848 const Register t = r11;
849
850 // allocate space for parameters
851 __ get_method(method);
852 __ movptr(t, Address(method, Method::const_offset()));
853 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
854
855 __ shll(t, Interpreter::logStackElementSize);
856
857 __ subptr(rsp, t);
858 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
859 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
860
861 // get signature handler
862 {
863 Label L;
864 __ movptr(t, Address(method, Method::signature_handler_offset()));
1217 address entry_point = __ pc();
1218
1219 // abstract method entry
1220
1221 // pop return address, reset last_sp to null
1222 __ empty_expression_stack();
1223 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
1224 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
1225
1226 // throw exception
1227 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1228 // the call_VM checks for exception, so we should never return here.
1229 __ should_not_reach_here();
1230
1231 return entry_point;
1232 }
1233
1234 //
1235 // Generic interpreted method entry to (asm) interpreter
1236 //
1237 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1238 // determine code generation flags
1239 bool inc_counter = UseCompiler || CountCompiledCalls;
1240
1241 // ebx: Method*
1242 // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1243 address entry_point = __ pc();
1244
1245 const Address constMethod(rbx, Method::const_offset());
1246 const Address access_flags(rbx, Method::access_flags_offset());
1247 const Address size_of_parameters(rdx,
1248 ConstMethod::size_of_parameters_offset());
1249 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1250
1251
1252 // get parameter size (always needed)
1253 __ movptr(rdx, constMethod);
1254 __ load_unsigned_short(rcx, size_of_parameters);
1255
1256 // rbx: Method*
1257 // rcx: size of parameters
1258 // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i )
1259
1356 }
1357
1358 // start execution
1359 #ifdef ASSERT
1360 {
1361 Label L;
1362 const Address monitor_block_top (rbp,
1363 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1364 __ movptr(rax, monitor_block_top);
1365 __ lea(rax, Address(rbp, rax, Address::times_ptr));
1366 __ cmpptr(rax, rsp);
1367 __ jcc(Assembler::equal, L);
1368 __ stop("broken stack frame setup in interpreter 6");
1369 __ bind(L);
1370 }
1371 #endif
1372
1373 // jvmti support
1374 __ notify_method_entry();
1375
1376 __ dispatch_next(vtos);
1377
1378 // invocation counter overflow
1379 if (inc_counter) {
1380 // Handle overflow of counter and compile method
1381 __ bind(invocation_counter_overflow);
1382 generate_counter_overflow(continue_after_compile);
1383 }
1384
1385 return entry_point;
1386 }
1387
1388 //-----------------------------------------------------------------------------
1389 // Exceptions
1390
1391 void TemplateInterpreterGenerator::generate_throw_exception() {
1392 // Entry point in previous activation (i.e., if the caller was
1393 // interpreted)
1394 Interpreter::_rethrow_exception_entry = __ pc();
1395 // Restore sp to interpreter_frame_last_sp even though we are going
1669 __ push(c_rarg0);
1670 __ push(c_rarg1);
1671 __ push(c_rarg2);
1672 __ push(c_rarg3);
1673 __ mov(c_rarg2, rax); // Pass itos
1674 #ifdef _WIN64
1675 __ movflt(xmm3, xmm0); // Pass ftos
1676 #endif
1677 __ call_VM(noreg,
1678 CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
1679 c_rarg1, c_rarg2, c_rarg3);
1680 __ pop(c_rarg3);
1681 __ pop(c_rarg2);
1682 __ pop(c_rarg1);
1683 __ pop(c_rarg0);
1684 __ pop(state);
1685 __ ret(0); // return from result handler
1686
1687 return entry;
1688 }
1689
// Emit code that performs a 64-bit increment of the global counter
// BytecodeCounter::_counter_value each time it is executed.
// rscratch1 is clobbered: it is the scratch register used to form the
// ExternalAddress of the counter.
1690 void TemplateInterpreterGenerator::count_bytecode() {
1691 __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
1692 }
1693
// Emit code that performs a 32-bit increment of the histogram slot
// BytecodeHistogram::_counters[t->bytecode()], i.e. one counter per
// bytecode of the given template. rscratch1 is clobbered as the
// scratch register needed to address the external counter array.
1694 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
1695 __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
1696 }
1697
// Emit code that maintains a histogram of executed bytecode *pairs*.
// BytecodePairHistogram::_index holds a rolling pair code: shifting it
// right by log2_number_of_codes discards the older bytecode, and the
// current template's bytecode is OR-ed into the high half. The combined
// index then selects the 32-bit slot in _counters to increment.
// Clobbers rbx (index computation) and rscratch1 (scratch for the
// external addresses and the counters base).
1698 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
1699 __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index)); // load previous pair code
1700 __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes); // drop the older of the two bytecodes
1701 __ orl(rbx,
1702 ((int) t->bytecode()) <<
1703 BytecodePairHistogram::log2_number_of_codes); // merge in the current bytecode as the newer half
1704 __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1); // publish new pair code
1705 __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters)); // counters base address
1706 __ incrementl(Address(rscratch1, rbx, Address::times_4)); // bump 32-bit counter for this pair
1707 }
1708
1709
1710 void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
1711 // Call a little run-time stub to avoid blow-up for each bytecode.
1712 // The run-time runtime saves the right registers, depending on
1713 // the tosca in-state for the given template.
1714
1715 assert(Interpreter::trace_code(t->tos_in()) != nullptr,
1716 "entry must have been generated");
1717 __ mov(r12, rsp); // remember sp (can only use r12 if not using call_VM)
|
702
703 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_growth_watermark()));
704 __ jcc(Assembler::above, L_done);
705
706 for (int p = 1; p <= n_shadow_pages; p++) {
707 __ bang_stack_with_offset(p*page_size);
708 }
709
710 // Record the new watermark, but only if update is above the safe limit.
711 // Otherwise, the next time around the check above would pass the safe limit.
712 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
713 __ jccb(Assembler::belowEqual, L_done);
714 __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);
715
716 __ bind(L_done);
717 }
718
719 // Interpreter stub for calling a native method. (asm interpreter)
720 // This sets up a somewhat different looking stack for calling the
721 // native method than the typical interpreter frame setup.
722 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
723 // determine code generation flags
724 bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
725
726 // rbx: Method*
727 // rbcp: sender sp
728
729 address entry_point = __ pc();
730
731 const Address constMethod (rbx, Method::const_offset());
732 const Address access_flags (rbx, Method::access_flags_offset());
733 const Address size_of_parameters(rcx, ConstMethod::
734 size_of_parameters_offset());
735
736
737 // get parameter size (always needed)
738 __ movptr(rcx, constMethod);
739 __ load_unsigned_short(rcx, size_of_parameters);
740
741 // native calls don't need the stack size check since they have no
742 // expression stack and the arguments are already on the stack and
743 // we only add a handful of words to the stack
744
825 }
826
827 // start execution
828 #ifdef ASSERT
829 {
830 Label L;
831 const Address monitor_block_top(rbp,
832 frame::interpreter_frame_monitor_block_top_offset * wordSize);
833 __ movptr(rax, monitor_block_top);
834 __ lea(rax, Address(rbp, rax, Address::times_ptr));
835 __ cmpptr(rax, rsp);
836 __ jcc(Assembler::equal, L);
837 __ stop("broken stack frame setup in interpreter 5");
838 __ bind(L);
839 }
840 #endif
841
842 // jvmti support
843 __ notify_method_entry();
844
845 // runtime upcalls
846 if (runtime_upcalls) {
847 __ generate_runtime_upcalls_on_method_entry();
848 }
849
850 // work registers
851 const Register method = rbx;
852 const Register thread = r15_thread;
853 const Register t = r11;
854
855 // allocate space for parameters
856 __ get_method(method);
857 __ movptr(t, Address(method, Method::const_offset()));
858 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
859
860 __ shll(t, Interpreter::logStackElementSize);
861
862 __ subptr(rsp, t);
863 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
864 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
865
866 // get signature handler
867 {
868 Label L;
869 __ movptr(t, Address(method, Method::signature_handler_offset()));
1222 address entry_point = __ pc();
1223
1224 // abstract method entry
1225
1226 // pop return address, reset last_sp to null
1227 __ empty_expression_stack();
1228 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
1229 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
1230
1231 // throw exception
1232 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1233 // the call_VM checks for exception, so we should never return here.
1234 __ should_not_reach_here();
1235
1236 return entry_point;
1237 }
1238
1239 //
1240 // Generic interpreted method entry to (asm) interpreter
1241 //
1242 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1243 // determine code generation flags
1244 bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1245
1246 // ebx: Method*
1247 // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1248 address entry_point = __ pc();
1249
1250 const Address constMethod(rbx, Method::const_offset());
1251 const Address access_flags(rbx, Method::access_flags_offset());
1252 const Address size_of_parameters(rdx,
1253 ConstMethod::size_of_parameters_offset());
1254 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1255
1256
1257 // get parameter size (always needed)
1258 __ movptr(rdx, constMethod);
1259 __ load_unsigned_short(rcx, size_of_parameters);
1260
1261 // rbx: Method*
1262 // rcx: size of parameters
1263 // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i )
1264
1361 }
1362
1363 // start execution
1364 #ifdef ASSERT
1365 {
1366 Label L;
1367 const Address monitor_block_top (rbp,
1368 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1369 __ movptr(rax, monitor_block_top);
1370 __ lea(rax, Address(rbp, rax, Address::times_ptr));
1371 __ cmpptr(rax, rsp);
1372 __ jcc(Assembler::equal, L);
1373 __ stop("broken stack frame setup in interpreter 6");
1374 __ bind(L);
1375 }
1376 #endif
1377
1378 // jvmti support
1379 __ notify_method_entry();
1380
1381 if (runtime_upcalls) {
1382 __ generate_runtime_upcalls_on_method_entry();
1383 }
1384
1385 __ dispatch_next(vtos);
1386
1387 // invocation counter overflow
1388 if (inc_counter) {
1389 // Handle overflow of counter and compile method
1390 __ bind(invocation_counter_overflow);
1391 generate_counter_overflow(continue_after_compile);
1392 }
1393
1394 return entry_point;
1395 }
1396
1397 //-----------------------------------------------------------------------------
1398 // Exceptions
1399
1400 void TemplateInterpreterGenerator::generate_throw_exception() {
1401 // Entry point in previous activation (i.e., if the caller was
1402 // interpreted)
1403 Interpreter::_rethrow_exception_entry = __ pc();
1404 // Restore sp to interpreter_frame_last_sp even though we are going
1678 __ push(c_rarg0);
1679 __ push(c_rarg1);
1680 __ push(c_rarg2);
1681 __ push(c_rarg3);
1682 __ mov(c_rarg2, rax); // Pass itos
1683 #ifdef _WIN64
1684 __ movflt(xmm3, xmm0); // Pass ftos
1685 #endif
1686 __ call_VM(noreg,
1687 CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
1688 c_rarg1, c_rarg2, c_rarg3);
1689 __ pop(c_rarg3);
1690 __ pop(c_rarg2);
1691 __ pop(c_rarg1);
1692 __ pop(c_rarg0);
1693 __ pop(state);
1694 __ ret(0); // return from result handler
1695
1696 return entry;
1697 }
1698 #endif // PRODUCT
1699
// Emit code that performs a 64-bit increment of the global counter
// BytecodeCounter::_counter_value each time it is executed.
// rscratch1 is clobbered: it is the scratch register used to form the
// ExternalAddress of the counter.
1700 void TemplateInterpreterGenerator::count_bytecode() {
1701 __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
1702 }
1703
// Emit code that performs a 32-bit increment of the histogram slot
// BytecodeHistogram::_counters[t->bytecode()], i.e. one counter per
// bytecode of the given template. rscratch1 is clobbered as the
// scratch register needed to address the external counter array.
1704 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
1705 __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
1706 }
1707
1708 #ifndef PRODUCT
// Emit code that maintains a histogram of executed bytecode *pairs*.
// BytecodePairHistogram::_index holds a rolling pair code: shifting it
// right by log2_number_of_codes discards the older bytecode, and the
// current template's bytecode is OR-ed into the high half. The combined
// index then selects the 32-bit slot in _counters to increment.
// Clobbers rbx (index computation) and rscratch1 (scratch for the
// external addresses and the counters base).
1709 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
1710 __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index)); // load previous pair code
1711 __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes); // drop the older of the two bytecodes
1712 __ orl(rbx,
1713 ((int) t->bytecode()) <<
1714 BytecodePairHistogram::log2_number_of_codes); // merge in the current bytecode as the newer half
1715 __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1); // publish new pair code
1716 __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters)); // counters base address
1717 __ incrementl(Address(rscratch1, rbx, Address::times_4)); // bump 32-bit counter for this pair
1718 }
1719
1720
1721 void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
1722 // Call a little run-time stub to avoid blow-up for each bytecode.
1723 // The run-time runtime saves the right registers, depending on
1724 // the tosca in-state for the given template.
1725
1726 assert(Interpreter::trace_code(t->tos_in()) != nullptr,
1727 "entry must have been generated");
1728 __ mov(r12, rsp); // remember sp (can only use r12 if not using call_VM)
|