
src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp


1167 
1168   for (int p = 1; p <= n_shadow_pages; p++) {
1169     __ sub(rscratch2, sp, p*page_size);
1170     __ str(zr, Address(rscratch2));
1171   }
1172 
1173   // Record the new watermark, but only if the update is above the safe limit.
1174   // Otherwise, the next time around, the check above would pass the safe limit.
1175   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1176   __ cmp(sp, rscratch1);
1177   __ br(Assembler::LS, L_done);
1178   __ mov(rscratch1, sp);
1179   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1180 
1181   __ bind(L_done);
1182 }
1183 
1184 // Interpreter stub for calling a native method. (asm interpreter)
1185 // This sets up a somewhat different-looking stack for calling the
1186 // native method than the typical interpreter frame setup.
1187 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
1188   // determine code generation flags
1189   bool inc_counter  = UseCompiler || CountCompiledCalls;
1190 
1191   // r1: Method*
1192   // rscratch1: sender sp
1193 
1194   address entry_point = __ pc();
1195 
1196   const Address constMethod       (rmethod, Method::const_offset());
1197   const Address access_flags      (rmethod, Method::access_flags_offset());
1198   const Address size_of_parameters(r2, ConstMethod::
1199                                        size_of_parameters_offset());
1200 
1201   // get parameter size (always needed)
1202   __ ldr(r2, constMethod);
1203   __ load_unsigned_short(r2, size_of_parameters);
1204 
1205   // Native calls don't need the stack size check since they have no
1206   // expression stack, the arguments are already on the stack, and
1207   // we only add a handful of words to the stack.

1286   }
1287 
1288   // start execution
1289 #ifdef ASSERT
1290   {
1291     Label L;
1292     const Address monitor_block_top(rfp,
1293                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1294     __ ldr(rscratch1, monitor_block_top);
1295     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1296     __ cmp(esp, rscratch1);
1297     __ br(Assembler::EQ, L);
1298     __ stop("broken stack frame setup in interpreter 1");
1299     __ bind(L);
1300   }
1301 #endif
1302 
1303   // jvmti support
1304   __ notify_method_entry();
1305 

1306   // work registers
1307   const Register t = r17;
1308   const Register result_handler = r19;
1309 
1310   // allocate space for parameters
1311   __ ldr(t, Address(rmethod, Method::const_offset()));
1312   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1313 
1314   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1315   __ andr(sp, rscratch1, -16);
1316   __ mov(esp, rscratch1);
1317 
1318   // get signature handler
1319   {
1320     Label L;
1321     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1322     __ cbnz(t, L);
1323     __ call_VM(noreg,
1324                CAST_FROM_FN_PTR(address,
1325                                 InterpreterRuntime::prepare_native_call),

1615   // remove frame anchor
1616   __ leave();
1617 
1618   // restore sender sp
1619   __ mov(sp, esp);
1620 
1621   __ ret(lr);
1622 
1623   if (inc_counter) {
1624     // Handle overflow of counter and compile method
1625     __ bind(invocation_counter_overflow);
1626     generate_counter_overflow(continue_after_compile);
1627   }
1628 
1629   return entry_point;
1630 }
1631 
1632 //
1633 // Generic interpreted method entry to (asm) interpreter
1634 //
1635 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1636   // determine code generation flags
1637   bool inc_counter  = UseCompiler || CountCompiledCalls;
1638 
1639   // rscratch1: sender sp
1640   address entry_point = __ pc();
1641 
1642   const Address constMethod(rmethod, Method::const_offset());
1643   const Address access_flags(rmethod, Method::access_flags_offset());
1644   const Address size_of_parameters(r3,
1645                                    ConstMethod::size_of_parameters_offset());
1646   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1647 
1648   // get parameter size (always needed)
1649   // need to load the const method first
1650   __ ldr(r3, constMethod);
1651   __ load_unsigned_short(r2, size_of_parameters);
1652 
1653   // r2: size of parameters
1654 
1655   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words

1760   }
1761 
1762   // start execution
1763 #ifdef ASSERT
1764   {
1765     Label L;
1766     const Address monitor_block_top(rfp,
1767                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1768     __ ldr(rscratch1, monitor_block_top);
1769     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1770     __ cmp(esp, rscratch1);
1771     __ br(Assembler::EQ, L);
1772     __ stop("broken stack frame setup in interpreter 2");
1773     __ bind(L);
1774   }
1775 #endif
1776 
1777   // jvmti support
1778   __ notify_method_entry();
1779 

1780   __ dispatch_next(vtos);
1781 
1782   // invocation counter overflow
1783   if (inc_counter) {
1784     // Handle overflow of counter and compile method
1785     __ bind(invocation_counter_overflow);
1786     generate_counter_overflow(continue_after_compile);
1787   }
1788 
1789   return entry_point;
1790 }
1791 
1792 // Method entry for java.lang.Thread.currentThread
1793 address TemplateInterpreterGenerator::generate_currentThread() {
1794   address entry_point = __ pc();
1795 
1796   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1797   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1798   __ ret(lr);
1799 

2061       __ push_ptr();
2062       __ b(L);
2063   fep = __ pc();     // ftos entry point
2064       __ push_f();
2065       __ b(L);
2066   dep = __ pc();     // dtos entry point
2067       __ push_d();
2068       __ b(L);
2069   lep = __ pc();     // ltos entry point
2070       __ push_l();
2071       __ b(L);
2072   bep = cep = sep = iep = __ pc();     // [bcsi]tos entry point
2073       __ push_i();
2074   vep = __ pc();     // vtos entry point
2075   __ bind(L);
2076   generate_and_dispatch(t);
2077 }
2078 
2079 //-----------------------------------------------------------------------------
2080 

2081 // Non-product code
2082 #ifndef PRODUCT
2083 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2084   address entry = __ pc();
2085 
2086   __ protect_return_address();
2087   __ push(lr);
2088   __ push(state);
2089   __ push(RegSet::range(r0, r15), sp);
2090   __ mov(c_rarg2, r0);  // Pass itos
2091   __ call_VM(noreg,
2092              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2093              c_rarg1, c_rarg2, c_rarg3);
2094   __ pop(RegSet::range(r0, r15), sp);
2095   __ pop(state);
2096   __ pop(lr);
2097   __ authenticate_return_address();
2098   __ ret(lr);                                   // return from result handler
2099 
2100   return entry;
2101 }
2102 
2103 void TemplateInterpreterGenerator::count_bytecode() {
2104   __ mov(r10, (address) &BytecodeCounter::_counter_value);
2105   __ atomic_addw(noreg, 1, r10);
2106 }
2107 
2108 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2109   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2110   __ atomic_addw(noreg, 1, r10);
2111 }
2112 
2113 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2114   // Calculate new index for counter:
2115   //   _index = (_index >> log2_number_of_codes) |
2116   //            (bytecode << log2_number_of_codes);
2117   Register index_addr = rscratch1;
2118   Register index = rscratch2;
2119   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2120   __ ldrw(index, index_addr);
2121   __ mov(r10,
2122          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2123   __ orrw(index, r10, index, Assembler::LSR,
2124           BytecodePairHistogram::log2_number_of_codes);
2125   __ strw(index, index_addr);
2126 
2127   // Bump bucket contents:
2128   //   _counters[_index] ++;
2129   Register counter_addr = rscratch1;
2130   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2131   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2132   __ atomic_addw(noreg, 1, counter_addr);

1167 
1168   for (int p = 1; p <= n_shadow_pages; p++) {
1169     __ sub(rscratch2, sp, p*page_size);
1170     __ str(zr, Address(rscratch2));
1171   }
1172 
1173   // Record the new watermark, but only if the update is above the safe limit.
1174   // Otherwise, the next time around, the check above would pass the safe limit.
1175   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1176   __ cmp(sp, rscratch1);
1177   __ br(Assembler::LS, L_done);
1178   __ mov(rscratch1, sp);
1179   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1180 
1181   __ bind(L_done);
1182 }
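
For reference, the banging loop and watermark update above behave roughly like the following C-style sketch (illustration only; page_size, n_shadow_pages and the two shadow-zone variables stand in for the values and thread fields used by the generated code and are not literal HotSpot APIs):

    // Touch one word in each shadow page below sp so the guard area is
    // materialized before deeply nested interpreter/native frames use it.
    for (int p = 1; p <= n_shadow_pages; p++) {
      *(volatile intptr_t*)((char*)sp - p * page_size) = 0;
    }
    // Record how far we have banged, but never move the watermark to or
    // below the safe limit (see the comment above).
    if ((char*)sp > shadow_zone_safe_limit) {
      shadow_zone_growth_watermark = (char*)sp;
    }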
1183 
1184 // Interpreter stub for calling a native method. (asm interpreter)
1185 // This sets up a somewhat different-looking stack for calling the
1186 // native method than the typical interpreter frame setup.
1187 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
1188   // determine code generation flags
1189   bool inc_counter  = UseCompiler || CountCompiledCalls;
1190 
1191   // r1: Method*
1192   // rscratch1: sender sp
1193 
1194   address entry_point = __ pc();
1195 
1196   const Address constMethod       (rmethod, Method::const_offset());
1197   const Address access_flags      (rmethod, Method::access_flags_offset());
1198   const Address size_of_parameters(r2, ConstMethod::
1199                                        size_of_parameters_offset());
1200 
1201   // get parameter size (always needed)
1202   __ ldr(r2, constMethod);
1203   __ load_unsigned_short(r2, size_of_parameters);
1204 
1205   // Native calls don't need the stack size check since they have no
1206   // expression stack, the arguments are already on the stack, and
1207   // we only add a handful of words to the stack.

1286   }
1287 
1288   // start execution
1289 #ifdef ASSERT
1290   {
1291     Label L;
1292     const Address monitor_block_top(rfp,
1293                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1294     __ ldr(rscratch1, monitor_block_top);
1295     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1296     __ cmp(esp, rscratch1);
1297     __ br(Assembler::EQ, L);
1298     __ stop("broken stack frame setup in interpreter 1");
1299     __ bind(L);
1300   }
1301 #endif
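
The assertion above decodes the monitor-block-top slot, which in this frame layout holds an offset in stack elements relative to rfp, and verifies that esp still points exactly at it, i.e. no monitors have been allocated and the expression stack is empty. Roughly (a sketch, not HotSpot runtime code):

    intptr_t top_offset_in_elements =
        ((intptr_t*)rfp)[frame::interpreter_frame_monitor_block_top_offset];
    intptr_t* expected_esp = (intptr_t*)rfp + top_offset_in_elements;  // scaled by the 8-byte element size
    assert(esp == expected_esp, "broken stack frame setup in interpreter");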
1302 
1303   // jvmti support
1304   __ notify_method_entry();
1305 
1306   if (runtime_upcalls) {
1307     __ generate_runtime_upcalls_on_method_entry();
1308   }
1309 
1310   // work registers
1311   const Register t = r17;
1312   const Register result_handler = r19;
1313 
1314   // allocate space for parameters
1315   __ ldr(t, Address(rmethod, Method::const_offset()));
1316   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1317 
1318   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1319   __ andr(sp, rscratch1, -16);
1320   __ mov(esp, rscratch1);
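
The three instructions above reserve one expression-stack slot per native parameter and keep the hardware sp 16-byte aligned while esp tracks the exact top of stack. With 8-byte stack slots (Interpreter::logStackElementSize == 3) the effect amounts to this sketch:

    new_esp = esp - t * 8;     // t = number of parameter slots
    sp      = new_esp & ~15;   // AArch64 requires sp to stay 16-byte aligned
    esp     = new_esp;         // the interpreter's expression stack pointer stays exact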
1321 
1322   // get signature handler
1323   {
1324     Label L;
1325     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1326     __ cbnz(t, L);
1327     __ call_VM(noreg,
1328                CAST_FROM_FN_PTR(address,
1329                                 InterpreterRuntime::prepare_native_call),

1619   // remove frame anchor
1620   __ leave();
1621 
1622   // restore sender sp
1623   __ mov(sp, esp);
1624 
1625   __ ret(lr);
1626 
1627   if (inc_counter) {
1628     // Handle overflow of counter and compile method
1629     __ bind(invocation_counter_overflow);
1630     generate_counter_overflow(continue_after_compile);
1631   }
1632 
1633   return entry_point;
1634 }
1635 
1636 //
1637 // Generic interpreted method entry to (asm) interpreter
1638 //
1639 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1640   // determine code generation flags
1641   bool inc_counter  = UseCompiler || CountCompiledCalls;
1642 
1643   // rscratch1: sender sp
1644   address entry_point = __ pc();
1645 
1646   const Address constMethod(rmethod, Method::const_offset());
1647   const Address access_flags(rmethod, Method::access_flags_offset());
1648   const Address size_of_parameters(r3,
1649                                    ConstMethod::size_of_parameters_offset());
1650   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1651 
1652   // get parameter size (always needed)
1653   // need to load the const method first
1654   __ ldr(r3, constMethod);
1655   __ load_unsigned_short(r2, size_of_parameters);
1656 
1657   // r2: size of parameters
1658 
1659   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words

1764   }
1765 
1766   // start execution
1767 #ifdef ASSERT
1768   {
1769     Label L;
1770     const Address monitor_block_top(rfp,
1771                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1772     __ ldr(rscratch1, monitor_block_top);
1773     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1774     __ cmp(esp, rscratch1);
1775     __ br(Assembler::EQ, L);
1776     __ stop("broken stack frame setup in interpreter 2");
1777     __ bind(L);
1778   }
1779 #endif
1780 
1781   // jvmti support
1782   __ notify_method_entry();
1783 
1784   // runtime upcalls
1785   if (runtime_upcalls) {
1786     __ generate_runtime_upcalls_on_method_entry();
1787   }
1788 
1789   __ dispatch_next(vtos);
1790 
1791   // invocation counter overflow
1792   if (inc_counter) {
1793     // Handle overflow of counter and compile method
1794     __ bind(invocation_counter_overflow);
1795     generate_counter_overflow(continue_after_compile);
1796   }
1797 
1798   return entry_point;
1799 }
1800 
1801 // Method entry for java.lang.Thread.currentThread
1802 address TemplateInterpreterGenerator::generate_currentThread() {
1803   address entry_point = __ pc();
1804 
1805   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1806   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1807   __ ret(lr);
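
This intrinsic entry returns the thread's current (virtual) thread oop: it loads the OopHandle stored at JavaThread::vthread_offset(), resolves it into r0 and returns. Conceptually (a sketch; the accessor name is an assumption):

    return thread->vthread();   // resolves the vthread OopHandle; result in r0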
1808 

2070       __ push_ptr();
2071       __ b(L);
2072   fep = __ pc();     // ftos entry point
2073       __ push_f();
2074       __ b(L);
2075   dep = __ pc();     // dtos entry point
2076       __ push_d();
2077       __ b(L);
2078   lep = __ pc();     // ltos entry point
2079       __ push_l();
2080       __ b(L);
2081   bep = cep = sep = iep = __ pc();     // [bcsi]tos entry point
2082       __ push_i();
2083   vep = __ pc();     // vtos entry point
2084   __ bind(L);
2085   generate_and_dispatch(t);
2086 }
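
Each TosState above gets its own entry point that pushes the value currently cached in registers onto the expression stack and then joins the shared vtos path, so generate_and_dispatch only has to handle the empty-register state. Schematically (a sketch of the generated control flow, not literal code):

    aep: push_ptr(); goto L;   // atos: reference in r0
    fep: push_f();   goto L;   // ftos: float in v0
    dep: push_d();   goto L;   // dtos: double in v0
    lep: push_l();   goto L;   // ltos: long in r0
    bep/cep/sep/iep: push_i(); // [bcsi]tos: int in r0, falls through
    vep: L: generate_and_dispatch(t);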
2087 
2088 //-----------------------------------------------------------------------------
2089 
2090 void TemplateInterpreterGenerator::count_bytecode() {
2091   if (CountBytecodesPerThread) {
2092     Address bc_counter_addr(rthread, Thread::bc_counter_offset());
2093     __ ldr(r10, bc_counter_addr);
2094     __ add(r10, r10, 1);
2095     __ str(r10, bc_counter_addr);
2096   }
2097   if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
2098     __ mov(r10, (address) &BytecodeCounter::_counter_value);
2099     __ atomic_add(noreg, 1, r10);
2100   }
2101 }
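
At run time the code emitted above does, per executed bytecode, roughly the following (sketch; the per-thread field name is an assumption based on Thread::bc_counter_offset()):

    if (CountBytecodesPerThread) {
      thread->_bc_counter++;                              // thread-local, so a plain ldr/add/str suffices
    }
    if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
      Atomic::add(&BytecodeCounter::_counter_value, 1);   // shared counter, hence the atomic add
    }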
2102 
2103 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2104   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2105   __ atomic_addw(noreg, 1, r10);
2106 }
2107 
2108 // Non-product code
2109 #ifndef PRODUCT
2110 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2111   address entry = __ pc();
2112 
2113   __ protect_return_address();
2114   __ push(lr);
2115   __ push(state);
2116   __ push(RegSet::range(r0, r15), sp);
2117   __ mov(c_rarg2, r0);  // Pass itos
2118   __ call_VM(noreg,
2119              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2120              c_rarg1, c_rarg2, c_rarg3);
2121   __ pop(RegSet::range(r0, r15), sp);
2122   __ pop(state);
2123   __ pop(lr);
2124   __ authenticate_return_address();
2125   __ ret(lr);                                   // return from result handler
2126 
2127   return entry;
2128 }
2129 

2130 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2131   // Calculate new index for counter:
2132   //   _index = (_index >> log2_number_of_codes) |
2133   //            (bytecode << log2_number_of_codes);
2134   Register index_addr = rscratch1;
2135   Register index = rscratch2;
2136   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2137   __ ldrw(index, index_addr);
2138   __ mov(r10,
2139          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2140   __ orrw(index, r10, index, Assembler::LSR,
2141           BytecodePairHistogram::log2_number_of_codes);
2142   __ strw(index, index_addr);
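  // Worked example (assuming BytecodePairHistogram::log2_number_of_codes == 8):
  // after executing iload (0x15) followed by iadd (0x60), the two updates
  // leave _index == (0x60 << 8) | 0x15 == 0x6015, so the increment below
  // bumps _counters[0x6015], the bucket for the (iload, iadd) pair.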
2143 
2144   // Bump bucket contents:
2145   //   _counters[_index] ++;
2146   Register counter_addr = rscratch1;
2147   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2148   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2149   __ atomic_addw(noreg, 1, counter_addr);