702
703 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_growth_watermark()));
704 __ jcc(Assembler::above, L_done);
705
706 for (int p = 1; p <= n_shadow_pages; p++) {
707 __ bang_stack_with_offset(p*page_size);
708 }
709
710 // Record the new watermark, but only if update is above the safe limit.
711 // Otherwise, the next time around the check above would pass the safe limit.
712 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
713 __ jccb(Assembler::belowEqual, L_done);
714 __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);
715
716 __ bind(L_done);
717 }
718
719 // Interpreter stub for calling a native method. (asm interpreter)
720 // This sets up a somewhat different looking stack for calling the
721 // native method than the typical interpreter frame setup.
722 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
723 // determine code generation flags
724 bool inc_counter = UseCompiler || CountCompiledCalls;
725
726 // rbx: Method*
727 // rbcp: sender sp
728
729 address entry_point = __ pc();
730
731 const Address constMethod (rbx, Method::const_offset());
732 const Address access_flags (rbx, Method::access_flags_offset());
733 const Address size_of_parameters(rcx, ConstMethod::
734 size_of_parameters_offset());
735
736
737 // get parameter size (always needed)
738 __ movptr(rcx, constMethod);
739 __ load_unsigned_short(rcx, size_of_parameters);
740
741 // native calls don't need the stack size check since they have no
742 // expression stack and the arguments are already on the stack and
743 // we only add a handful of words to the stack
744
825 }
826
827 // start execution
828 #ifdef ASSERT
829 {
830 Label L;
831 const Address monitor_block_top(rbp,
832 frame::interpreter_frame_monitor_block_top_offset * wordSize);
833 __ movptr(rax, monitor_block_top);
834 __ lea(rax, Address(rbp, rax, Address::times_ptr));
835 __ cmpptr(rax, rsp);
836 __ jcc(Assembler::equal, L);
837 __ stop("broken stack frame setup in interpreter 5");
838 __ bind(L);
839 }
840 #endif
841
842 // jvmti support
843 __ notify_method_entry();
844
845 // work registers
846 const Register method = rbx;
847 const Register thread = r15_thread;
848 const Register t = r11;
849
850 // allocate space for parameters
851 __ get_method(method);
852 __ movptr(t, Address(method, Method::const_offset()));
853 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
854
855 __ shll(t, Interpreter::logStackElementSize);
856
857 __ subptr(rsp, t);
858 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
859 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
860
861 // get signature handler
862 {
863 Label L;
864 __ movptr(t, Address(method, Method::signature_handler_offset()));
1212 address entry_point = __ pc();
1213
1214 // abstract method entry
1215
1216 // pop return address, reset last_sp to null
1217 __ empty_expression_stack();
1218 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
1219 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
1220
1221 // throw exception
1222 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1223 // the call_VM checks for exception, so we should never return here.
1224 __ should_not_reach_here();
1225
1226 return entry_point;
1227 }
1228
1229 //
1230 // Generic interpreted method entry to (asm) interpreter
1231 //
1232 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1233 // determine code generation flags
1234 bool inc_counter = UseCompiler || CountCompiledCalls;
1235
1236 // ebx: Method*
1237 // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1238 address entry_point = __ pc();
1239
1240 const Address constMethod(rbx, Method::const_offset());
1241 const Address access_flags(rbx, Method::access_flags_offset());
1242 const Address size_of_parameters(rdx,
1243 ConstMethod::size_of_parameters_offset());
1244 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1245
1246
1247 // get parameter size (always needed)
1248 __ movptr(rdx, constMethod);
1249 __ load_unsigned_short(rcx, size_of_parameters);
1250
1251 // rbx: Method*
1252 // rcx: size of parameters
1253 // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i )
1254
1351 }
1352
1353 // start execution
1354 #ifdef ASSERT
1355 {
1356 Label L;
1357 const Address monitor_block_top (rbp,
1358 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1359 __ movptr(rax, monitor_block_top);
1360 __ lea(rax, Address(rbp, rax, Address::times_ptr));
1361 __ cmpptr(rax, rsp);
1362 __ jcc(Assembler::equal, L);
1363 __ stop("broken stack frame setup in interpreter 6");
1364 __ bind(L);
1365 }
1366 #endif
1367
1368 // jvmti support
1369 __ notify_method_entry();
1370
1371 __ dispatch_next(vtos);
1372
1373 // invocation counter overflow
1374 if (inc_counter) {
1375 // Handle overflow of counter and compile method
1376 __ bind(invocation_counter_overflow);
1377 generate_counter_overflow(continue_after_compile);
1378 }
1379
1380 return entry_point;
1381 }
1382
1383 //-----------------------------------------------------------------------------
1384 // Exceptions
1385
1386 void TemplateInterpreterGenerator::generate_throw_exception() {
1387 // Entry point in previous activation (i.e., if the caller was
1388 // interpreted)
1389 Interpreter::_rethrow_exception_entry = __ pc();
1390 // Restore sp to interpreter_frame_last_sp even though we are going
1665 __ push(c_rarg0);
1666 __ push(c_rarg1);
1667 __ push(c_rarg2);
1668 __ push(c_rarg3);
1669 __ mov(c_rarg2, rax); // Pass itos
1670 #ifdef _WIN64
1671 __ movflt(xmm3, xmm0); // Pass ftos
1672 #endif
1673 __ call_VM(noreg,
1674 CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
1675 c_rarg1, c_rarg2, c_rarg3);
1676 __ pop(c_rarg3);
1677 __ pop(c_rarg2);
1678 __ pop(c_rarg1);
1679 __ pop(c_rarg0);
1680 __ pop(state);
1681 __ ret(0); // return from result handler
1682
1683 return entry;
1684 }
1685
// Emit code that bumps the global count of executed bytecodes
// (BytecodeCounter::_counter_value). The counter lives at a fixed external
// address, so rscratch1 is passed as the temporary needed for the 64-bit
// increment of an ExternalAddress.
void TemplateInterpreterGenerator::count_bytecode() {
  __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
}
1689
// Emit code that bumps the per-bytecode execution counter for template t:
// BytecodeHistogram::_counters[t->bytecode()]. The slot address is resolved
// at code-generation time; rscratch1 is the temporary for the 32-bit
// increment of an ExternalAddress.
void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
  __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
}
1693
// Emit code that maintains a histogram of consecutive bytecode pairs.
// _index carries the previously executed bytecode in its high
// log2_number_of_codes bits: shifting right recovers it, and or-ing in the
// current bytecode (shifted left) forms the pair index for this transition.
// Clobbers rbx and rscratch1.
void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
  __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));
  __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);
  __ orl(rbx,
         ((int) t->bytecode()) <<
         BytecodePairHistogram::log2_number_of_codes);
  // Publish the updated index so the next bytecode pairs with this one.
  __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1);
  // _counters[pair_index]++ (4-byte counters, indexed by rbx).
  __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters));
  __ incrementl(Address(rscratch1, rbx, Address::times_4));
}
1704
1705
1706 void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
1707 // Call a little run-time stub to avoid blow-up for each bytecode.
1709 // The run-time stub saves the right registers, depending on
1709 // the tosca in-state for the given template.
1710
1711 assert(Interpreter::trace_code(t->tos_in()) != nullptr,
1712 "entry must have been generated");
1713 __ mov(r12, rsp); // remember sp (can only use r12 if not using call_VM)
|
702
703 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_growth_watermark()));
704 __ jcc(Assembler::above, L_done);
705
706 for (int p = 1; p <= n_shadow_pages; p++) {
707 __ bang_stack_with_offset(p*page_size);
708 }
709
710 // Record the new watermark, but only if update is above the safe limit.
711 // Otherwise, the next time around the check above would pass the safe limit.
712 __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
713 __ jccb(Assembler::belowEqual, L_done);
714 __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);
715
716 __ bind(L_done);
717 }
718
719 // Interpreter stub for calling a native method. (asm interpreter)
720 // This sets up a somewhat different looking stack for calling the
721 // native method than the typical interpreter frame setup.
722 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
723 // determine code generation flags
724 bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
725
726 // rbx: Method*
727 // rbcp: sender sp
728
729 address entry_point = __ pc();
730
731 const Address constMethod (rbx, Method::const_offset());
732 const Address access_flags (rbx, Method::access_flags_offset());
733 const Address size_of_parameters(rcx, ConstMethod::
734 size_of_parameters_offset());
735
736
737 // get parameter size (always needed)
738 __ movptr(rcx, constMethod);
739 __ load_unsigned_short(rcx, size_of_parameters);
740
741 // native calls don't need the stack size check since they have no
742 // expression stack and the arguments are already on the stack and
743 // we only add a handful of words to the stack
744
825 }
826
827 // start execution
828 #ifdef ASSERT
829 {
830 Label L;
831 const Address monitor_block_top(rbp,
832 frame::interpreter_frame_monitor_block_top_offset * wordSize);
833 __ movptr(rax, monitor_block_top);
834 __ lea(rax, Address(rbp, rax, Address::times_ptr));
835 __ cmpptr(rax, rsp);
836 __ jcc(Assembler::equal, L);
837 __ stop("broken stack frame setup in interpreter 5");
838 __ bind(L);
839 }
840 #endif
841
842 // jvmti support
843 __ notify_method_entry();
844
845 // runtime upcalls
846 if (runtime_upcalls) {
847 __ generate_runtime_upcalls_on_method_entry();
848 }
849
850 // work registers
851 const Register method = rbx;
852 const Register thread = r15_thread;
853 const Register t = r11;
854
855 // allocate space for parameters
856 __ get_method(method);
857 __ movptr(t, Address(method, Method::const_offset()));
858 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
859
860 __ shll(t, Interpreter::logStackElementSize);
861
862 __ subptr(rsp, t);
863 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
864 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
865
866 // get signature handler
867 {
868 Label L;
869 __ movptr(t, Address(method, Method::signature_handler_offset()));
1217 address entry_point = __ pc();
1218
1219 // abstract method entry
1220
1221 // pop return address, reset last_sp to null
1222 __ empty_expression_stack();
1223 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
1224 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
1225
1226 // throw exception
1227 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1228 // the call_VM checks for exception, so we should never return here.
1229 __ should_not_reach_here();
1230
1231 return entry_point;
1232 }
1233
1234 //
1235 // Generic interpreted method entry to (asm) interpreter
1236 //
1237 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1238 // determine code generation flags
1239 bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1240
1241 // ebx: Method*
1242 // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1243 address entry_point = __ pc();
1244
1245 const Address constMethod(rbx, Method::const_offset());
1246 const Address access_flags(rbx, Method::access_flags_offset());
1247 const Address size_of_parameters(rdx,
1248 ConstMethod::size_of_parameters_offset());
1249 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1250
1251
1252 // get parameter size (always needed)
1253 __ movptr(rdx, constMethod);
1254 __ load_unsigned_short(rcx, size_of_parameters);
1255
1256 // rbx: Method*
1257 // rcx: size of parameters
1258 // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i )
1259
1356 }
1357
1358 // start execution
1359 #ifdef ASSERT
1360 {
1361 Label L;
1362 const Address monitor_block_top (rbp,
1363 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1364 __ movptr(rax, monitor_block_top);
1365 __ lea(rax, Address(rbp, rax, Address::times_ptr));
1366 __ cmpptr(rax, rsp);
1367 __ jcc(Assembler::equal, L);
1368 __ stop("broken stack frame setup in interpreter 6");
1369 __ bind(L);
1370 }
1371 #endif
1372
1373 // jvmti support
1374 __ notify_method_entry();
1375
1376 if (runtime_upcalls) {
1377 __ generate_runtime_upcalls_on_method_entry();
1378 }
1379
1380 __ dispatch_next(vtos);
1381
1382 // invocation counter overflow
1383 if (inc_counter) {
1384 // Handle overflow of counter and compile method
1385 __ bind(invocation_counter_overflow);
1386 generate_counter_overflow(continue_after_compile);
1387 }
1388
1389 return entry_point;
1390 }
1391
1392 //-----------------------------------------------------------------------------
1393 // Exceptions
1394
1395 void TemplateInterpreterGenerator::generate_throw_exception() {
1396 // Entry point in previous activation (i.e., if the caller was
1397 // interpreted)
1398 Interpreter::_rethrow_exception_entry = __ pc();
1399 // Restore sp to interpreter_frame_last_sp even though we are going
1674 __ push(c_rarg0);
1675 __ push(c_rarg1);
1676 __ push(c_rarg2);
1677 __ push(c_rarg3);
1678 __ mov(c_rarg2, rax); // Pass itos
1679 #ifdef _WIN64
1680 __ movflt(xmm3, xmm0); // Pass ftos
1681 #endif
1682 __ call_VM(noreg,
1683 CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
1684 c_rarg1, c_rarg2, c_rarg3);
1685 __ pop(c_rarg3);
1686 __ pop(c_rarg2);
1687 __ pop(c_rarg1);
1688 __ pop(c_rarg0);
1689 __ pop(state);
1690 __ ret(0); // return from result handler
1691
1692 return entry;
1693 }
1694 #endif // PRODUCT
1695
// Emit code that bumps the global count of executed bytecodes
// (BytecodeCounter::_counter_value). The counter lives at a fixed external
// address, so rscratch1 is passed as the temporary needed for the 64-bit
// increment of an ExternalAddress.
void TemplateInterpreterGenerator::count_bytecode() {
  __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);
}
1699
// Emit code that bumps the per-bytecode execution counter for template t:
// BytecodeHistogram::_counters[t->bytecode()]. The slot address is resolved
// at code-generation time; rscratch1 is the temporary for the 32-bit
// increment of an ExternalAddress.
void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
  __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);
}
1703
1704 #ifndef PRODUCT
// Emit code that maintains a histogram of consecutive bytecode pairs.
// _index carries the previously executed bytecode in its high
// log2_number_of_codes bits: shifting right recovers it, and or-ing in the
// current bytecode (shifted left) forms the pair index for this transition.
// Clobbers rbx and rscratch1.
void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
  __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));
  __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);
  __ orl(rbx,
         ((int) t->bytecode()) <<
         BytecodePairHistogram::log2_number_of_codes);
  // Publish the updated index so the next bytecode pairs with this one.
  __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1);
  // _counters[pair_index]++ (4-byte counters, indexed by rbx).
  __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters));
  __ incrementl(Address(rscratch1, rbx, Address::times_4));
}
1715
1716
1717 void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
1718 // Call a little run-time stub to avoid blow-up for each bytecode.
1719 // The run-time stub saves the right registers, depending on
1720 // the tosca in-state for the given template.
1721
1722 assert(Interpreter::trace_code(t->tos_in()) != nullptr,
1723 "entry must have been generated");
1724 __ mov(r12, rsp); // remember sp (can only use r12 if not using call_VM)
|