< prev index next >

src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp

Print this page

1167 
1168   for (int p = 1; p <= n_shadow_pages; p++) {
1169     __ sub(rscratch2, sp, p*page_size);
1170     __ str(zr, Address(rscratch2));
1171   }
1172 
1173   // Record the new watermark, but only if the update is above the safe limit.
1174   // Otherwise, the next time around the check above would pass the safe limit.
1175   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1176   __ cmp(sp, rscratch1);
1177   __ br(Assembler::LS, L_done);
1178   __ mov(rscratch1, sp);
1179   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1180 
1181   __ bind(L_done);
1182 }
1183 
1184 // Interpreter stub for calling a native method. (asm interpreter)
1185 // This sets up a somewhat different looking stack for calling the
1186 // native method than the typical interpreter frame setup.
1187 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
1188   // determine code generation flags
1189   bool inc_counter  = UseCompiler || CountCompiledCalls;
1190 
1191   // r1: Method*
1192   // rscratch1: sender sp
1193 
1194   address entry_point = __ pc();
1195 
1196   const Address constMethod       (rmethod, Method::const_offset());
1197   const Address access_flags      (rmethod, Method::access_flags_offset());
1198   const Address size_of_parameters(r2, ConstMethod::
1199                                        size_of_parameters_offset());
1200 
1201   // get parameter size (always needed)
1202   __ ldr(r2, constMethod);
1203   __ load_unsigned_short(r2, size_of_parameters);
1204 
1205   // Native calls don't need the stack size check since they have no
1206   // expression stack and the arguments are already on the stack and
1207   // we only add a handful of words to the stack.
1208 
1209   // rmethod: Method*

1286   }
1287 
1288   // start execution
1289 #ifdef ASSERT
1290   {
1291     Label L;
1292     const Address monitor_block_top(rfp,
1293                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1294     __ ldr(rscratch1, monitor_block_top);
1295     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1296     __ cmp(esp, rscratch1);
1297     __ br(Assembler::EQ, L);
1298     __ stop("broken stack frame setup in interpreter 1");
1299     __ bind(L);
1300   }
1301 #endif
1302 
1303   // jvmti support
1304   __ notify_method_entry();
1305 




1306   // work registers
1307   const Register t = r17;
1308   const Register result_handler = r19;
1309 
1310   // allocate space for parameters
1311   __ ldr(t, Address(rmethod, Method::const_offset()));
1312   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1313 
1314   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1315   __ andr(sp, rscratch1, -16);
1316   __ mov(esp, rscratch1);
1317 
1318   // get signature handler
1319   {
1320     Label L;
1321     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1322     __ cbnz(t, L);
1323     __ call_VM(noreg,
1324                CAST_FROM_FN_PTR(address,
1325                                 InterpreterRuntime::prepare_native_call),

1641 
1642   JFR_ONLY(__ leave_jfr_critical_section();)
1643 
1644   // restore sender sp
1645   __ mov(sp, esp);
1646 
1647   __ ret(lr);
1648 
1649   if (inc_counter) {
1650     // Handle overflow of counter and compile method
1651     __ bind(invocation_counter_overflow);
1652     generate_counter_overflow(continue_after_compile);
1653   }
1654 
1655   return entry_point;
1656 }
1657 
1658 //
1659 // Generic interpreted method entry to (asm) interpreter
1660 //
1661 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1662   // determine code generation flags
1663   bool inc_counter  = UseCompiler || CountCompiledCalls;
1664 
1665   // rscratch1: sender sp
1666   address entry_point = __ pc();
1667 
1668   const Address constMethod(rmethod, Method::const_offset());
1669   const Address access_flags(rmethod, Method::access_flags_offset());
1670   const Address size_of_parameters(r3,
1671                                    ConstMethod::size_of_parameters_offset());
1672   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1673 
1674   // get parameter size (always needed)
1675   // need to load the const method first
1676   __ ldr(r3, constMethod);
1677   __ load_unsigned_short(r2, size_of_parameters);
1678 
1679   // r2: size of parameters
1680 
1681   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1682   __ sub(r3, r3, r2); // r3 = no. of additional locals
1683 

1786   }
1787 
1788   // start execution
1789 #ifdef ASSERT
1790   {
1791     Label L;
1792      const Address monitor_block_top (rfp,
1793                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1794     __ ldr(rscratch1, monitor_block_top);
1795     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1796     __ cmp(esp, rscratch1);
1797     __ br(Assembler::EQ, L);
1798     __ stop("broken stack frame setup in interpreter 2");
1799     __ bind(L);
1800   }
1801 #endif
1802 
1803   // jvmti support
1804   __ notify_method_entry();
1805 





1806   __ dispatch_next(vtos);
1807 
1808   // invocation counter overflow
1809   if (inc_counter) {
1810     // Handle overflow of counter and compile method
1811     __ bind(invocation_counter_overflow);
1812     generate_counter_overflow(continue_after_compile);
1813   }
1814 
1815   return entry_point;
1816 }
1817 
1818 // Method entry for java.lang.Thread.currentThread
1819 address TemplateInterpreterGenerator::generate_currentThread() {
1820   address entry_point = __ pc();
1821 
1822   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1823   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1824   __ ret(lr);
1825 

2102 #ifndef PRODUCT
// Non-product stub: traces one bytecode by calling
// InterpreterRuntime::trace_bytecode in the VM. The interpreter state is
// preserved around the call: lr is saved (after being signed), the current
// tos value for `state` is pushed, and r0-r15 are saved/restored.
2103 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2104   address entry = __ pc();
2105 
// NOTE(review): protect/authenticate_return_address presumably sign and
// verify lr via AArch64 pointer authentication -- confirm in the aarch64
// MacroAssembler.
2106   __ protect_return_address();
2107   __ push(lr);
2108   __ push(state);
// Save the general registers so the VM call cannot disturb interpreter state.
2109   __ push(RegSet::range(r0, r15), sp);
2110   __ mov(c_rarg2, r0);  // Pass itos
2111   __ call_VM(noreg,
2112              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2113              c_rarg1, c_rarg2, c_rarg3);
// Restore everything in reverse order of the saves above.
2114   __ pop(RegSet::range(r0, r15), sp);
2115   __ pop(state);
2116   __ pop(lr);
2117   __ authenticate_return_address();
2118   __ ret(lr);                                   // return from result handler
2119 
2120   return entry;
2121 }

2122 
// Emits code that bumps the global interpreted-bytecode counter
// (BytecodeCounter::_counter_value) with an atomic add. r10 is used to
// materialize the counter's address and is clobbered.
2123 void TemplateInterpreterGenerator::count_bytecode() {
2124   __ mov(r10, (address) &BytecodeCounter::_counter_value);
2125   __ atomic_add(noreg, 1, r10);  // noreg: old value is not needed








2126 }
2127 
// Emits code that increments the histogram bucket for this template's
// bytecode, BytecodeHistogram::_counters[t->bytecode()]. The bucket
// address is computed at code-generation time and materialized in r10
// (clobbered); the increment uses a word-sized atomic add.
2128 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2129   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2130   __ atomic_addw(noreg, 1, r10);
2131 }
2132 

2133 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2134   // Calculate new index for counter:
2135   //   _index = (_index >> log2_number_of_codes) |
2136   //            (bytecode << log2_number_of_codes);
2137   Register index_addr = rscratch1;
2138   Register index = rscratch2;
2139   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2140   __ ldrw(index, index_addr);
2141   __ mov(r10,
2142          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2143   __ orrw(index, r10, index, Assembler::LSR,
2144           BytecodePairHistogram::log2_number_of_codes);
2145   __ strw(index, index_addr);
2146 
2147   // Bump bucket contents:
2148   //   _counters[_index] ++;
2149   Register counter_addr = rscratch1;
2150   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2151   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2152   __ atomic_addw(noreg, 1, counter_addr);

1167 
1168   for (int p = 1; p <= n_shadow_pages; p++) {
1169     __ sub(rscratch2, sp, p*page_size);
1170     __ str(zr, Address(rscratch2));
1171   }
1172 
1173   // Record the new watermark, but only if the update is above the safe limit.
1174   // Otherwise, the next time around the check above would pass the safe limit.
1175   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1176   __ cmp(sp, rscratch1);
1177   __ br(Assembler::LS, L_done);
1178   __ mov(rscratch1, sp);
1179   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1180 
1181   __ bind(L_done);
1182 }
1183 
1184 // Interpreter stub for calling a native method. (asm interpreter)
1185 // This sets up a somewhat different looking stack for calling the
1186 // native method than the typical interpreter frame setup.
1187 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
1188   // determine code generation flags
1189   bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1190 
1191   // r1: Method*
1192   // rscratch1: sender sp
1193 
1194   address entry_point = __ pc();
1195 
1196   const Address constMethod       (rmethod, Method::const_offset());
1197   const Address access_flags      (rmethod, Method::access_flags_offset());
1198   const Address size_of_parameters(r2, ConstMethod::
1199                                        size_of_parameters_offset());
1200 
1201   // get parameter size (always needed)
1202   __ ldr(r2, constMethod);
1203   __ load_unsigned_short(r2, size_of_parameters);
1204 
1205   // Native calls don't need the stack size check since they have no
1206   // expression stack and the arguments are already on the stack and
1207   // we only add a handful of words to the stack.
1208 
1209   // rmethod: Method*

1286   }
1287 
1288   // start execution
1289 #ifdef ASSERT
1290   {
1291     Label L;
1292     const Address monitor_block_top(rfp,
1293                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1294     __ ldr(rscratch1, monitor_block_top);
1295     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1296     __ cmp(esp, rscratch1);
1297     __ br(Assembler::EQ, L);
1298     __ stop("broken stack frame setup in interpreter 1");
1299     __ bind(L);
1300   }
1301 #endif
1302 
1303   // jvmti support
1304   __ notify_method_entry();
1305 
1306   if (runtime_upcalls) {
1307     __ generate_runtime_upcalls_on_method_entry();
1308   }
1309 
1310   // work registers
1311   const Register t = r17;
1312   const Register result_handler = r19;
1313 
1314   // allocate space for parameters
1315   __ ldr(t, Address(rmethod, Method::const_offset()));
1316   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1317 
1318   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1319   __ andr(sp, rscratch1, -16);
1320   __ mov(esp, rscratch1);
1321 
1322   // get signature handler
1323   {
1324     Label L;
1325     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1326     __ cbnz(t, L);
1327     __ call_VM(noreg,
1328                CAST_FROM_FN_PTR(address,
1329                                 InterpreterRuntime::prepare_native_call),

1645 
1646   JFR_ONLY(__ leave_jfr_critical_section();)
1647 
1648   // restore sender sp
1649   __ mov(sp, esp);
1650 
1651   __ ret(lr);
1652 
1653   if (inc_counter) {
1654     // Handle overflow of counter and compile method
1655     __ bind(invocation_counter_overflow);
1656     generate_counter_overflow(continue_after_compile);
1657   }
1658 
1659   return entry_point;
1660 }
1661 
1662 //
1663 // Generic interpreted method entry to (asm) interpreter
1664 //
1665 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1666   // determine code generation flags
1667   bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1668 
1669   // rscratch1: sender sp
1670   address entry_point = __ pc();
1671 
1672   const Address constMethod(rmethod, Method::const_offset());
1673   const Address access_flags(rmethod, Method::access_flags_offset());
1674   const Address size_of_parameters(r3,
1675                                    ConstMethod::size_of_parameters_offset());
1676   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1677 
1678   // get parameter size (always needed)
1679   // need to load the const method first
1680   __ ldr(r3, constMethod);
1681   __ load_unsigned_short(r2, size_of_parameters);
1682 
1683   // r2: size of parameters
1684 
1685   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1686   __ sub(r3, r3, r2); // r3 = no. of additional locals
1687 

1790   }
1791 
1792   // start execution
1793 #ifdef ASSERT
1794   {
1795     Label L;
1796      const Address monitor_block_top (rfp,
1797                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1798     __ ldr(rscratch1, monitor_block_top);
1799     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1800     __ cmp(esp, rscratch1);
1801     __ br(Assembler::EQ, L);
1802     __ stop("broken stack frame setup in interpreter 2");
1803     __ bind(L);
1804   }
1805 #endif
1806 
1807   // jvmti support
1808   __ notify_method_entry();
1809 
1810   // runtime upcalls
1811   if (runtime_upcalls) {
1812     __ generate_runtime_upcalls_on_method_entry();
1813   }
1814 
1815   __ dispatch_next(vtos);
1816 
1817   // invocation counter overflow
1818   if (inc_counter) {
1819     // Handle overflow of counter and compile method
1820     __ bind(invocation_counter_overflow);
1821     generate_counter_overflow(continue_after_compile);
1822   }
1823 
1824   return entry_point;
1825 }
1826 
1827 // Method entry for java.lang.Thread.currentThread
1828 address TemplateInterpreterGenerator::generate_currentThread() {
1829   address entry_point = __ pc();
1830 
1831   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1832   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1833   __ ret(lr);
1834 

2111 #ifndef PRODUCT
// Non-product stub: traces one bytecode by calling
// InterpreterRuntime::trace_bytecode in the VM. The interpreter state is
// preserved around the call: lr is saved (after being signed), the current
// tos value for `state` is pushed, and r0-r15 are saved/restored.
2112 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2113   address entry = __ pc();
2114 
// NOTE(review): protect/authenticate_return_address presumably sign and
// verify lr via AArch64 pointer authentication -- confirm in the aarch64
// MacroAssembler.
2115   __ protect_return_address();
2116   __ push(lr);
2117   __ push(state);
// Save the general registers so the VM call cannot disturb interpreter state.
2118   __ push(RegSet::range(r0, r15), sp);
2119   __ mov(c_rarg2, r0);  // Pass itos
2120   __ call_VM(noreg,
2121              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2122              c_rarg1, c_rarg2, c_rarg3);
// Restore everything in reverse order of the saves above.
2123   __ pop(RegSet::range(r0, r15), sp);
2124   __ pop(state);
2125   __ pop(lr);
2126   __ authenticate_return_address();
2127   __ ret(lr);                                   // return from result handler
2128 
2129   return entry;
2130 }
2131 #endif // PRODUCT
2132 
// Emits code that bumps the interpreted-bytecode counters:
//  - per-thread counter at Thread::bc_counter_offset() when
//    CountBytecodesPerThread is set, via plain ldr/add/str (no atomic --
//    presumably the field is only updated by its owning thread; confirm);
//  - global BytecodeCounter::_counter_value, via atomic add, when any of
//    CountBytecodes, TraceBytecodes, or StopInterpreterAt > 0 needs it.
// r10 is used as scratch and is clobbered on both paths.
2133 void TemplateInterpreterGenerator::count_bytecode() {
2134   if (CountBytecodesPerThread) {
2135     Address bc_counter_addr(rthread, Thread::bc_counter_offset());
2136     __ ldr(r10, bc_counter_addr);
2137     __ add(r10, r10, 1);
2138     __ str(r10, bc_counter_addr);
2139   }
2140   if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
2141     __ mov(r10, (address) &BytecodeCounter::_counter_value);
2142     __ atomic_add(noreg, 1, r10);
2143   }
2144 }
2145 
// Emits code that increments the histogram bucket for this template's
// bytecode, BytecodeHistogram::_counters[t->bytecode()]. The bucket
// address is computed at code-generation time and materialized in r10
// (clobbered); the increment uses a word-sized atomic add.
2146 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2147   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2148   __ atomic_addw(noreg, 1, r10);
2149 }
2150 
2151 #ifndef PRODUCT
2152 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2153   // Calculate new index for counter:
2154   //   _index = (_index >> log2_number_of_codes) |
2155   //            (bytecode << log2_number_of_codes);
2156   Register index_addr = rscratch1;
2157   Register index = rscratch2;
2158   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2159   __ ldrw(index, index_addr);
2160   __ mov(r10,
2161          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2162   __ orrw(index, r10, index, Assembler::LSR,
2163           BytecodePairHistogram::log2_number_of_codes);
2164   __ strw(index, index_addr);
2165 
2166   // Bump bucket contents:
2167   //   _counters[_index] ++;
2168   Register counter_addr = rscratch1;
2169   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2170   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2171   __ atomic_addw(noreg, 1, counter_addr);
< prev index next >