< prev index next >

src/hotspot/cpu/x86/templateInterpreterGenerator_x86.cpp

Print this page

 702 
 703   __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_growth_watermark()));
 704   __ jcc(Assembler::above, L_done);
 705 
 706   for (int p = 1; p <= n_shadow_pages; p++) {
 707     __ bang_stack_with_offset(p*page_size);
 708   }
 709 
 710   // Record the new watermark, but only if update is above the safe limit.
 711   // Otherwise, the next time around the check above would pass the safe limit.
 712   __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
 713   __ jccb(Assembler::belowEqual, L_done);
 714   __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);
 715 
 716   __ bind(L_done);
 717 }
 718 
 719 // Interpreter stub for calling a native method. (asm interpreter)
 720 // This sets up a somewhat different looking stack for calling the
 721 // native method than the typical interpreter frame setup.
 722 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
 723   // determine code generation flags
 724   bool inc_counter  = UseCompiler || CountCompiledCalls;
 725 
 726   // rbx: Method*
 727   // rbcp: sender sp
 728 
 729   address entry_point = __ pc();
 730 
 731   const Address constMethod       (rbx, Method::const_offset());
 732   const Address access_flags      (rbx, Method::access_flags_offset());
 733   const Address size_of_parameters(rcx, ConstMethod::
 734                                         size_of_parameters_offset());
 735 
 736 
 737   // get parameter size (always needed)
 738   __ movptr(rcx, constMethod);
 739   __ load_unsigned_short(rcx, size_of_parameters);
 740 
 741   // native calls don't need the stack size check since they have no
 742   // expression stack and the arguments are already on the stack and
 743   // we only add a handful of words to the stack
 744 

 825   }
 826 
 827   // start execution
 828 #ifdef ASSERT
 829   {
 830     Label L;
 831     const Address monitor_block_top(rbp,
 832                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
 833     __ movptr(rax, monitor_block_top);
 834     __ lea(rax, Address(rbp, rax, Address::times_ptr));
 835     __ cmpptr(rax, rsp);
 836     __ jcc(Assembler::equal, L);
 837     __ stop("broken stack frame setup in interpreter 5");
 838     __ bind(L);
 839   }
 840 #endif
 841 
 842   // jvmti support
 843   __ notify_method_entry();
 844 





 845   // work registers
 846   const Register method = rbx;
 847   const Register thread = r15_thread;
 848   const Register t      = r11;
 849 
 850   // allocate space for parameters
 851   __ get_method(method);
 852   __ movptr(t, Address(method, Method::const_offset()));
 853   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
 854 
 855   __ shll(t, Interpreter::logStackElementSize);
 856 
 857   __ subptr(rsp, t);
 858   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
 859   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
 860 
 861   // get signature handler
 862   {
 863     Label L;
 864     __ movptr(t, Address(method, Method::signature_handler_offset()));

1189   address entry_point = __ pc();
1190 
1191   // abstract method entry
1192 
1193   //  pop return address, reset last_sp to null
1194   __ empty_expression_stack();
1195   __ restore_bcp();      // rsi must be correct for exception handler   (was destroyed)
1196   __ restore_locals();   // make sure locals pointer is correct as well (was destroyed)
1197 
1198   // throw exception
1199   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1200   // the call_VM checks for exception, so we should never return here.
1201   __ should_not_reach_here();
1202 
1203   return entry_point;
1204 }
1205 
1206 //
1207 // Generic interpreted method entry to (asm) interpreter
1208 //
1209 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1210   // determine code generation flags
1211   bool inc_counter  = UseCompiler || CountCompiledCalls;
1212 
1213   // ebx: Method*
1214   // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1215   address entry_point = __ pc();
1216 
1217   const Address constMethod(rbx, Method::const_offset());
1218   const Address access_flags(rbx, Method::access_flags_offset());
1219   const Address size_of_parameters(rdx,
1220                                    ConstMethod::size_of_parameters_offset());
1221   const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1222 
1223 
1224   // get parameter size (always needed)
1225   __ movptr(rdx, constMethod);
1226   __ load_unsigned_short(rcx, size_of_parameters);
1227 
1228   // rbx: Method*
1229   // rcx: size of parameters
1230   // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i )
1231 

1328   }
1329 
1330   // start execution
1331 #ifdef ASSERT
1332   {
1333     Label L;
1334      const Address monitor_block_top (rbp,
1335                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1336     __ movptr(rax, monitor_block_top);
1337     __ lea(rax, Address(rbp, rax, Address::times_ptr));
1338     __ cmpptr(rax, rsp);
1339     __ jcc(Assembler::equal, L);
1340     __ stop("broken stack frame setup in interpreter 6");
1341     __ bind(L);
1342   }
1343 #endif
1344 
1345   // jvmti support
1346   __ notify_method_entry();
1347 




1348   __ dispatch_next(vtos);
1349 
1350   // invocation counter overflow
1351   if (inc_counter) {
1352     // Handle overflow of counter and compile method
1353     __ bind(invocation_counter_overflow);
1354     generate_counter_overflow(continue_after_compile);
1355   }
1356 
1357   return entry_point;
1358 }
1359 
1360 //-----------------------------------------------------------------------------
1361 // Exceptions
1362 
1363 void TemplateInterpreterGenerator::generate_throw_exception() {
1364   // Entry point in previous activation (i.e., if the caller was
1365   // interpreted)
1366   Interpreter::_rethrow_exception_entry = __ pc();
1367   // Restore sp to interpreter_frame_last_sp even though we are going

1641   __ push(c_rarg0);
1642   __ push(c_rarg1);
1643   __ push(c_rarg2);
1644   __ push(c_rarg3);
1645   __ mov(c_rarg2, rax);  // Pass itos
1646 #ifdef _WIN64
1647   __ movflt(xmm3, xmm0); // Pass ftos
1648 #endif
1649   __ call_VM(noreg,
1650              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
1651              c_rarg1, c_rarg2, c_rarg3);
1652   __ pop(c_rarg3);
1653   __ pop(c_rarg2);
1654   __ pop(c_rarg1);
1655   __ pop(c_rarg0);
1656   __ pop(state);
1657   __ ret(0);                                   // return from result handler
1658 
1659   return entry;
1660 }

1661 
1662 void TemplateInterpreterGenerator::count_bytecode() { // Emit code that increments the global executed-bytecode counter.
1663   __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);  // 64-bit increment; rscratch1 materializes the counter's far address
1664 }
1665 
1666 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) { // Emit code that bumps the per-bytecode histogram slot for this template's bytecode.
1667   __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);  // 32-bit increment of _counters[t->bytecode()]; rscratch1 holds the address
1668 }
1669 

1670 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) { // Emit code that counts (previous, current) bytecode pairs. Clobbers rbx and rscratch1.
1671   __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));  // load combined index left by the previous bytecode
1672   __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);  // drop the older half, keeping the previous bytecode in the low bits
1673   __ orl(rbx,
1674          ((int) t->bytecode()) <<
1675          BytecodePairHistogram::log2_number_of_codes);  // merge the current bytecode into the high bits
1676   __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1);  // store updated index for the next pair
1677   __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters));
1678   __ incrementl(Address(rscratch1, rbx, Address::times_4));  // _counters[index]++ (32-bit slots, hence times_4 scaling)
1679 }
1680 
1681 
1682 void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
1683   // Call a little run-time stub to avoid blow-up for each bytecode.
1684   // The run-time stub saves the right registers, depending on
1685   // the tosca in-state for the given template.
1686 
1687   assert(Interpreter::trace_code(t->tos_in()) != nullptr,
1688          "entry must have been generated");
1689   __ mov(r12, rsp); // remember sp (can only use r12 if not using call_VM)

 702 
 703   __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_growth_watermark()));
 704   __ jcc(Assembler::above, L_done);
 705 
 706   for (int p = 1; p <= n_shadow_pages; p++) {
 707     __ bang_stack_with_offset(p*page_size);
 708   }
 709 
 710   // Record the new watermark, but only if update is above the safe limit.
 711   // Otherwise, the next time around the check above would pass the safe limit.
 712   __ cmpptr(rsp, Address(thread, JavaThread::shadow_zone_safe_limit()));
 713   __ jccb(Assembler::belowEqual, L_done);
 714   __ movptr(Address(thread, JavaThread::shadow_zone_growth_watermark()), rsp);
 715 
 716   __ bind(L_done);
 717 }
 718 
 719 // Interpreter stub for calling a native method. (asm interpreter)
 720 // This sets up a somewhat different looking stack for calling the
 721 // native method than the typical interpreter frame setup.
 722 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
 723   // determine code generation flags
 724   bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
 725 
 726   // rbx: Method*
 727   // rbcp: sender sp
 728 
 729   address entry_point = __ pc();
 730 
 731   const Address constMethod       (rbx, Method::const_offset());
 732   const Address access_flags      (rbx, Method::access_flags_offset());
 733   const Address size_of_parameters(rcx, ConstMethod::
 734                                         size_of_parameters_offset());
 735 
 736 
 737   // get parameter size (always needed)
 738   __ movptr(rcx, constMethod);
 739   __ load_unsigned_short(rcx, size_of_parameters);
 740 
 741   // native calls don't need the stack size check since they have no
 742   // expression stack and the arguments are already on the stack and
 743   // we only add a handful of words to the stack
 744 

 825   }
 826 
 827   // start execution
 828 #ifdef ASSERT
 829   {
 830     Label L;
 831     const Address monitor_block_top(rbp,
 832                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
 833     __ movptr(rax, monitor_block_top);
 834     __ lea(rax, Address(rbp, rax, Address::times_ptr));
 835     __ cmpptr(rax, rsp);
 836     __ jcc(Assembler::equal, L);
 837     __ stop("broken stack frame setup in interpreter 5");
 838     __ bind(L);
 839   }
 840 #endif
 841 
 842   // jvmti support
 843   __ notify_method_entry();
 844 
 845   // runtime upcalls
 846   if (runtime_upcalls) {
 847     __ generate_runtime_upcalls_on_method_entry();
 848   }
 849 
 850   // work registers
 851   const Register method = rbx;
 852   const Register thread = r15_thread;
 853   const Register t      = r11;
 854 
 855   // allocate space for parameters
 856   __ get_method(method);
 857   __ movptr(t, Address(method, Method::const_offset()));
 858   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
 859 
 860   __ shll(t, Interpreter::logStackElementSize);
 861 
 862   __ subptr(rsp, t);
 863   __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
 864   __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
 865 
 866   // get signature handler
 867   {
 868     Label L;
 869     __ movptr(t, Address(method, Method::signature_handler_offset()));

1194   address entry_point = __ pc();
1195 
1196   // abstract method entry
1197 
1198   //  pop return address, reset last_sp to null
1199   __ empty_expression_stack();
1200   __ restore_bcp();      // rsi must be correct for exception handler   (was destroyed)
1201   __ restore_locals();   // make sure locals pointer is correct as well (was destroyed)
1202 
1203   // throw exception
1204   __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1205   // the call_VM checks for exception, so we should never return here.
1206   __ should_not_reach_here();
1207 
1208   return entry_point;
1209 }
1210 
1211 //
1212 // Generic interpreted method entry to (asm) interpreter
1213 //
1214 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1215   // determine code generation flags
1216   bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1217 
1218   // ebx: Method*
1219   // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1220   address entry_point = __ pc();
1221 
1222   const Address constMethod(rbx, Method::const_offset());
1223   const Address access_flags(rbx, Method::access_flags_offset());
1224   const Address size_of_parameters(rdx,
1225                                    ConstMethod::size_of_parameters_offset());
1226   const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1227 
1228 
1229   // get parameter size (always needed)
1230   __ movptr(rdx, constMethod);
1231   __ load_unsigned_short(rcx, size_of_parameters);
1232 
1233   // rbx: Method*
1234   // rcx: size of parameters
1235   // rbcp: sender_sp (could differ from sp+wordSize if we were called via c2i )
1236 

1333   }
1334 
1335   // start execution
1336 #ifdef ASSERT
1337   {
1338     Label L;
1339      const Address monitor_block_top (rbp,
1340                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1341     __ movptr(rax, monitor_block_top);
1342     __ lea(rax, Address(rbp, rax, Address::times_ptr));
1343     __ cmpptr(rax, rsp);
1344     __ jcc(Assembler::equal, L);
1345     __ stop("broken stack frame setup in interpreter 6");
1346     __ bind(L);
1347   }
1348 #endif
1349 
1350   // jvmti support
1351   __ notify_method_entry();
1352 
1353   if (runtime_upcalls) {
1354     __ generate_runtime_upcalls_on_method_entry();
1355   }
1356 
1357   __ dispatch_next(vtos);
1358 
1359   // invocation counter overflow
1360   if (inc_counter) {
1361     // Handle overflow of counter and compile method
1362     __ bind(invocation_counter_overflow);
1363     generate_counter_overflow(continue_after_compile);
1364   }
1365 
1366   return entry_point;
1367 }
1368 
1369 //-----------------------------------------------------------------------------
1370 // Exceptions
1371 
1372 void TemplateInterpreterGenerator::generate_throw_exception() {
1373   // Entry point in previous activation (i.e., if the caller was
1374   // interpreted)
1375   Interpreter::_rethrow_exception_entry = __ pc();
1376   // Restore sp to interpreter_frame_last_sp even though we are going

1650   __ push(c_rarg0);
1651   __ push(c_rarg1);
1652   __ push(c_rarg2);
1653   __ push(c_rarg3);
1654   __ mov(c_rarg2, rax);  // Pass itos
1655 #ifdef _WIN64
1656   __ movflt(xmm3, xmm0); // Pass ftos
1657 #endif
1658   __ call_VM(noreg,
1659              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
1660              c_rarg1, c_rarg2, c_rarg3);
1661   __ pop(c_rarg3);
1662   __ pop(c_rarg2);
1663   __ pop(c_rarg1);
1664   __ pop(c_rarg0);
1665   __ pop(state);
1666   __ ret(0);                                   // return from result handler
1667 
1668   return entry;
1669 }
1670 #endif // PRODUCT
1671 
1672 void TemplateInterpreterGenerator::count_bytecode() { // Emit code that increments the global executed-bytecode counter.
1673   __ incrementq(ExternalAddress((address) &BytecodeCounter::_counter_value), rscratch1);  // 64-bit increment; rscratch1 materializes the counter's far address
1674 }
1675 
1676 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) { // Emit code that bumps the per-bytecode histogram slot for this template's bytecode.
1677   __ incrementl(ExternalAddress((address) &BytecodeHistogram::_counters[t->bytecode()]), rscratch1);  // 32-bit increment of _counters[t->bytecode()]; rscratch1 holds the address
1678 }
1679 
1680 #ifndef PRODUCT
1681 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) { // Emit code that counts (previous, current) bytecode pairs. Clobbers rbx and rscratch1.
1682   __ mov32(rbx, ExternalAddress((address) &BytecodePairHistogram::_index));  // load combined index left by the previous bytecode
1683   __ shrl(rbx, BytecodePairHistogram::log2_number_of_codes);  // drop the older half, keeping the previous bytecode in the low bits
1684   __ orl(rbx,
1685          ((int) t->bytecode()) <<
1686          BytecodePairHistogram::log2_number_of_codes);  // merge the current bytecode into the high bits
1687   __ mov32(ExternalAddress((address) &BytecodePairHistogram::_index), rbx, rscratch1);  // store updated index for the next pair
1688   __ lea(rscratch1, ExternalAddress((address) BytecodePairHistogram::_counters));
1689   __ incrementl(Address(rscratch1, rbx, Address::times_4));  // _counters[index]++ (32-bit slots, hence times_4 scaling)
1690 }
1691 
1692 
1693 void TemplateInterpreterGenerator::trace_bytecode(Template* t) {
1694   // Call a little run-time stub to avoid blow-up for each bytecode.
1695   // The run-time stub saves the right registers, depending on
1696   // the tosca in-state for the given template.
1697 
1698   assert(Interpreter::trace_code(t->tos_in()) != nullptr,
1699          "entry must have been generated");
1700   __ mov(r12, rsp); // remember sp (can only use r12 if not using call_VM)
< prev index next >