< prev index next >

src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp

Print this page

1172 
1173   for (int p = 1; p <= n_shadow_pages; p++) {
1174     __ sub(rscratch2, sp, p*page_size);
1175     __ str(zr, Address(rscratch2));
1176   }
1177 
1178   // Record the new watermark, but only if the update is above the safe limit.
1179   // Otherwise, the next time around the check above would pass the safe limit.
1180   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1181   __ cmp(sp, rscratch1);
1182   __ br(Assembler::LS, L_done);
1183   __ mov(rscratch1, sp);
1184   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1185 
1186   __ bind(L_done);
1187 }
1188 
1189 // Interpreter stub for calling a native method. (asm interpreter)
1190 // This sets up a somewhat different looking stack for calling the
1191 // native method than the typical interpreter frame setup.
1192 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
1193   // determine code generation flags
1194   bool inc_counter  = UseCompiler || CountCompiledCalls;
1195 
1196   // r1: Method*
1197   // rscratch1: sender sp
1198 
1199   address entry_point = __ pc();
1200 
1201   const Address constMethod       (rmethod, Method::const_offset());
1202   const Address access_flags      (rmethod, Method::access_flags_offset());
1203   const Address size_of_parameters(r2, ConstMethod::
1204                                        size_of_parameters_offset());
1205 
1206   // get parameter size (always needed)
1207   __ ldr(r2, constMethod);
1208   __ load_unsigned_short(r2, size_of_parameters);
1209 
1210   // Native calls don't need the stack size check since they have no
1211   // expression stack and the arguments are already on the stack and
1212   // we only add a handful of words to the stack.
1213 
1214   // rmethod: Method*

1291   }
1292 
1293   // start execution
1294 #ifdef ASSERT
1295   {
1296     Label L;
1297     const Address monitor_block_top(rfp,
1298                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1299     __ ldr(rscratch1, monitor_block_top);
1300     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1301     __ cmp(esp, rscratch1);
1302     __ br(Assembler::EQ, L);
1303     __ stop("broken stack frame setup in interpreter 1");
1304     __ bind(L);
1305   }
1306 #endif
1307 
1308   // jvmti support
1309   __ notify_method_entry();
1310 




1311   // work registers
1312   const Register t = r17;
1313   const Register result_handler = r19;
1314 
1315   // allocate space for parameters
1316   __ ldr(t, Address(rmethod, Method::const_offset()));
1317   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1318 
1319   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1320   __ andr(sp, rscratch1, -16);
1321   __ mov(esp, rscratch1);
1322 
1323   // get signature handler
1324   {
1325     Label L;
1326     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1327     __ cbnz(t, L);
1328     __ call_VM(noreg,
1329                CAST_FROM_FN_PTR(address,
1330                                 InterpreterRuntime::prepare_native_call),

1641 
1642   JFR_ONLY(__ leave_jfr_critical_section();)
1643 
1644   // restore sender sp
1645   __ mov(sp, esp);
1646 
1647   __ ret(lr);
1648 
1649   if (inc_counter) {
1650     // Handle overflow of counter and compile method
1651     __ bind(invocation_counter_overflow);
1652     generate_counter_overflow(continue_after_compile);
1653   }
1654 
1655   return entry_point;
1656 }
1657 
1658 //
1659 // Generic interpreted method entry to (asm) interpreter
1660 //
1661 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1662   // determine code generation flags
1663   bool inc_counter  = UseCompiler || CountCompiledCalls;
1664 
1665   // rscratch1: sender sp
1666   address entry_point = __ pc();
1667 
1668   const Address constMethod(rmethod, Method::const_offset());
1669   const Address access_flags(rmethod, Method::access_flags_offset());
1670   const Address size_of_parameters(r3,
1671                                    ConstMethod::size_of_parameters_offset());
1672   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1673 
1674   // get parameter size (always needed)
1675   // need to load the const method first
1676   __ ldr(r3, constMethod);
1677   __ load_unsigned_short(r2, size_of_parameters);
1678 
1679   // r2: size of parameters
1680 
1681   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1682   __ sub(r3, r3, r2); // r3 = no. of additional locals
1683 

1786   }
1787 
1788   // start execution
1789 #ifdef ASSERT
1790   {
1791     Label L;
1792      const Address monitor_block_top (rfp,
1793                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1794     __ ldr(rscratch1, monitor_block_top);
1795     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1796     __ cmp(esp, rscratch1);
1797     __ br(Assembler::EQ, L);
1798     __ stop("broken stack frame setup in interpreter 2");
1799     __ bind(L);
1800   }
1801 #endif
1802 
1803   // jvmti support
1804   __ notify_method_entry();
1805 





1806   __ dispatch_next(vtos);
1807 
1808   // invocation counter overflow
1809   if (inc_counter) {
1810     // Handle overflow of counter and compile method
1811     __ bind(invocation_counter_overflow);
1812     generate_counter_overflow(continue_after_compile);
1813   }
1814 
1815   return entry_point;
1816 }
1817 
1818 // Method entry for java.lang.Thread.currentThread
1819 address TemplateInterpreterGenerator::generate_currentThread() {
1820   address entry_point = __ pc();
1821 
1822   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1823   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1824   __ ret(lr);
1825 

2103 #ifndef PRODUCT
2104 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
     // Non-PRODUCT tracing stub: saves lr, the tos value for `state`, and
     // r0..r15, calls InterpreterRuntime::trace_bytecode in the VM, then
     // restores everything and returns to the caller.
2105   address entry = __ pc();
2106 
     // Sign lr before spilling it; re-authenticated after the matching pop
     // below (AArch64 pointer authentication / ROP hardening).
2107   __ protect_return_address();
2108   __ push(lr);
2109   __ push(state);               // save the top-of-stack value(s) for `state`
2110   __ push(RegSet::range(r0, r15), sp);
2111   __ mov(c_rarg2, r0);  // Pass itos
2112   __ call_VM(noreg,
2113              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2114              c_rarg1, c_rarg2, c_rarg3);
     // Restore in reverse order of the saves above.
2115   __ pop(RegSet::range(r0, r15), sp);
2116   __ pop(state);
2117   __ pop(lr);
2118   __ authenticate_return_address();
2119   __ ret(lr);                                   // return from result handler
2120 
2121   return entry;
2122 }

2123 
2124 void TemplateInterpreterGenerator::count_bytecode() {
     // Emit code to atomically bump the global bytecode execution counter
     // (BytecodeCounter::_counter_value). Clobbers r10.
2125   __ mov(r10, (address) &BytecodeCounter::_counter_value);
2126   __ atomic_add(noreg, 1, r10);   // increment; prior value discarded (noreg)




 
 
 
 
2127 }
2128 
2129 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
     // Emit code to bump the histogram slot for this template's bytecode.
     // Clobbers r10; 32-bit atomic increment, prior value discarded (noreg).
2130   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2131   __ atomic_addw(noreg, 1, r10);
2132 }
2133 

2134 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2135   // Calculate new index for counter:
2136   //   _index = (_index >> log2_number_of_codes) |
2137   //            (bytecode << log2_number_of_codes);
2138   Register index_addr = rscratch1;
2139   Register index = rscratch2;
2140   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2141   __ ldrw(index, index_addr);
2142   __ mov(r10,
2143          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2144   __ orrw(index, r10, index, Assembler::LSR,
2145           BytecodePairHistogram::log2_number_of_codes);
2146   __ strw(index, index_addr);
2147 
2148   // Bump bucket contents:
2149   //   _counters[_index] ++;
2150   Register counter_addr = rscratch1;
2151   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2152   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2153   __ atomic_addw(noreg, 1, counter_addr);

1172 
1173   for (int p = 1; p <= n_shadow_pages; p++) {
1174     __ sub(rscratch2, sp, p*page_size);
1175     __ str(zr, Address(rscratch2));
1176   }
1177 
1178   // Record the new watermark, but only if the update is above the safe limit.
1179   // Otherwise, the next time around the check above would pass the safe limit.
1180   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1181   __ cmp(sp, rscratch1);
1182   __ br(Assembler::LS, L_done);
1183   __ mov(rscratch1, sp);
1184   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1185 
1186   __ bind(L_done);
1187 }
1188 
1189 // Interpreter stub for calling a native method. (asm interpreter)
1190 // This sets up a somewhat different looking stack for calling the
1191 // native method than the typical interpreter frame setup.
1192 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
1193   // determine code generation flags
1194   bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1195 
1196   // r1: Method*
1197   // rscratch1: sender sp
1198 
1199   address entry_point = __ pc();
1200 
1201   const Address constMethod       (rmethod, Method::const_offset());
1202   const Address access_flags      (rmethod, Method::access_flags_offset());
1203   const Address size_of_parameters(r2, ConstMethod::
1204                                        size_of_parameters_offset());
1205 
1206   // get parameter size (always needed)
1207   __ ldr(r2, constMethod);
1208   __ load_unsigned_short(r2, size_of_parameters);
1209 
1210   // Native calls don't need the stack size check since they have no
1211   // expression stack and the arguments are already on the stack and
1212   // we only add a handful of words to the stack.
1213 
1214   // rmethod: Method*

1291   }
1292 
1293   // start execution
1294 #ifdef ASSERT
1295   {
1296     Label L;
1297     const Address monitor_block_top(rfp,
1298                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1299     __ ldr(rscratch1, monitor_block_top);
1300     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1301     __ cmp(esp, rscratch1);
1302     __ br(Assembler::EQ, L);
1303     __ stop("broken stack frame setup in interpreter 1");
1304     __ bind(L);
1305   }
1306 #endif
1307 
1308   // jvmti support
1309   __ notify_method_entry();
1310 
1311   if (runtime_upcalls) {
1312     __ generate_runtime_upcalls_on_method_entry();
1313   }
1314 
1315   // work registers
1316   const Register t = r17;
1317   const Register result_handler = r19;
1318 
1319   // allocate space for parameters
1320   __ ldr(t, Address(rmethod, Method::const_offset()));
1321   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1322 
1323   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1324   __ andr(sp, rscratch1, -16);
1325   __ mov(esp, rscratch1);
1326 
1327   // get signature handler
1328   {
1329     Label L;
1330     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1331     __ cbnz(t, L);
1332     __ call_VM(noreg,
1333                CAST_FROM_FN_PTR(address,
1334                                 InterpreterRuntime::prepare_native_call),

1645 
1646   JFR_ONLY(__ leave_jfr_critical_section();)
1647 
1648   // restore sender sp
1649   __ mov(sp, esp);
1650 
1651   __ ret(lr);
1652 
1653   if (inc_counter) {
1654     // Handle overflow of counter and compile method
1655     __ bind(invocation_counter_overflow);
1656     generate_counter_overflow(continue_after_compile);
1657   }
1658 
1659   return entry_point;
1660 }
1661 
1662 //
1663 // Generic interpreted method entry to (asm) interpreter
1664 //
1665 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1666   // determine code generation flags
1667   bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1668 
1669   // rscratch1: sender sp
1670   address entry_point = __ pc();
1671 
1672   const Address constMethod(rmethod, Method::const_offset());
1673   const Address access_flags(rmethod, Method::access_flags_offset());
1674   const Address size_of_parameters(r3,
1675                                    ConstMethod::size_of_parameters_offset());
1676   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1677 
1678   // get parameter size (always needed)
1679   // need to load the const method first
1680   __ ldr(r3, constMethod);
1681   __ load_unsigned_short(r2, size_of_parameters);
1682 
1683   // r2: size of parameters
1684 
1685   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1686   __ sub(r3, r3, r2); // r3 = no. of additional locals
1687 

1790   }
1791 
1792   // start execution
1793 #ifdef ASSERT
1794   {
1795     Label L;
1796      const Address monitor_block_top (rfp,
1797                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1798     __ ldr(rscratch1, monitor_block_top);
1799     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1800     __ cmp(esp, rscratch1);
1801     __ br(Assembler::EQ, L);
1802     __ stop("broken stack frame setup in interpreter 2");
1803     __ bind(L);
1804   }
1805 #endif
1806 
1807   // jvmti support
1808   __ notify_method_entry();
1809 
1810   // runtime upcalls
1811   if (runtime_upcalls) {
1812     __ generate_runtime_upcalls_on_method_entry();
1813   }
1814 
1815   __ dispatch_next(vtos);
1816 
1817   // invocation counter overflow
1818   if (inc_counter) {
1819     // Handle overflow of counter and compile method
1820     __ bind(invocation_counter_overflow);
1821     generate_counter_overflow(continue_after_compile);
1822   }
1823 
1824   return entry_point;
1825 }
1826 
1827 // Method entry for java.lang.Thread.currentThread
1828 address TemplateInterpreterGenerator::generate_currentThread() {
1829   address entry_point = __ pc();
1830 
1831   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1832   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1833   __ ret(lr);
1834 

2112 #ifndef PRODUCT
2113 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
     // Non-PRODUCT tracing stub: saves lr, the tos value for `state`, and
     // r0..r15, calls InterpreterRuntime::trace_bytecode in the VM, then
     // restores everything and returns to the caller.
2114   address entry = __ pc();
2115 
     // Sign lr before spilling it; re-authenticated after the matching pop
     // below (AArch64 pointer authentication / ROP hardening).
2116   __ protect_return_address();
2117   __ push(lr);
2118   __ push(state);               // save the top-of-stack value(s) for `state`
2119   __ push(RegSet::range(r0, r15), sp);
2120   __ mov(c_rarg2, r0);  // Pass itos
2121   __ call_VM(noreg,
2122              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2123              c_rarg1, c_rarg2, c_rarg3);
     // Restore in reverse order of the saves above.
2124   __ pop(RegSet::range(r0, r15), sp);
2125   __ pop(state);
2126   __ pop(lr);
2127   __ authenticate_return_address();
2128   __ ret(lr);                                   // return from result handler
2129 
2130   return entry;
2131 }
2132 #endif // PRODUCT
2133 
2134 void TemplateInterpreterGenerator::count_bytecode() {
     // Emit code to bump the bytecode execution counter(s). Clobbers r10.
2135   if (CountBytecodesPerThread) {
     // Per-thread counter lives in a thread-local field, so a plain
     // load/add/store suffices — no atomic needed for this field
     // (presumably only the owning thread updates it — confirm).
2136     Address bc_counter_addr(rthread, Thread::bc_counter_offset());
2137     __ ldr(r10, bc_counter_addr);
2138     __ add(r10, r10, 1);
2139     __ str(r10, bc_counter_addr);
2140   }
     // The global counter is shared across threads, so it is updated
     // atomically; it is only maintained when some feature consumes it.
2141   if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
2142     __ mov(r10, (address) &BytecodeCounter::_counter_value);
2143     __ atomic_add(noreg, 1, r10);   // increment; prior value discarded (noreg)
2144   }
2145 }
2146 
2147 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
     // Emit code to bump the histogram slot for this template's bytecode.
     // Clobbers r10; 32-bit atomic increment, prior value discarded (noreg).
2148   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2149   __ atomic_addw(noreg, 1, r10);
2150 }
2151 
2152 #ifndef PRODUCT
2153 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2154   // Calculate new index for counter:
2155   //   _index = (_index >> log2_number_of_codes) |
2156   //            (bytecode << log2_number_of_codes);
2157   Register index_addr = rscratch1;
2158   Register index = rscratch2;
2159   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2160   __ ldrw(index, index_addr);
2161   __ mov(r10,
2162          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2163   __ orrw(index, r10, index, Assembler::LSR,
2164           BytecodePairHistogram::log2_number_of_codes);
2165   __ strw(index, index_addr);
2166 
2167   // Bump bucket contents:
2168   //   _counters[_index] ++;
2169   Register counter_addr = rscratch1;
2170   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2171   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2172   __ atomic_addw(noreg, 1, counter_addr);
< prev index next >