
src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp


1168 
1169   for (int p = 1; p <= n_shadow_pages; p++) {
1170     __ sub(rscratch2, sp, p*page_size);
1171     __ str(zr, Address(rscratch2));
1172   }
1173 
1174   // Record the new watermark, but only if the update is above the safe limit.
1175   // Otherwise, the next time around the check above would pass the safe limit.
1176   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1177   __ cmp(sp, rscratch1);
1178   __ br(Assembler::LS, L_done);
1179   __ mov(rscratch1, sp);
1180   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1181 
1182   __ bind(L_done);
1183 }
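The hunk above first touches (bangs) each shadow page below the current SP so the OS commits that stack memory, then advances the thread's shadow-zone growth watermark. A minimal C++ sketch of the same logic follows; the ThreadShadowZone struct and its field names are illustrative stand-ins for the JavaThread state used by the generated code, not HotSpot's actual API.

    #include <cstdint>

    // Illustrative stand-in for the per-thread shadow-zone bookkeeping.
    struct ThreadShadowZone {
      char* safe_limit;        // boundary below which the watermark must not be advanced
      char* growth_watermark;  // lowest SP for which shadow pages are known to be banged
    };

    static void bang_shadow_pages(ThreadShadowZone* zone, char* sp,
                                  int n_shadow_pages, int page_size) {
      // str zr, [sp - p*page_size]: touch one word per shadow page so it is committed.
      for (int p = 1; p <= n_shadow_pages; p++) {
        *(volatile uintptr_t*)(sp - (intptr_t)p * page_size) = 0;
      }
      // Record the new watermark only while SP is still above the safe limit; otherwise
      // the watermark check on a later entry could conclude the zone was already banged
      // even though SP had crossed the safe limit.
      if (sp > zone->safe_limit) {
        zone->growth_watermark = sp;
      }
    }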
1184 
1185 // Interpreter stub for calling a native method. (asm interpreter)
1186 // This sets up a somewhat different looking stack for calling the
1187 // native method than the typical interpreter frame setup.
1188 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
1189   // determine code generation flags
1190   bool inc_counter  = UseCompiler || CountCompiledCalls;
1191 
1192   // r1: Method*
1193   // rscratch1: sender sp
1194 
1195   address entry_point = __ pc();
1196 
1197   const Address constMethod       (rmethod, Method::const_offset());
1198   const Address access_flags      (rmethod, Method::access_flags_offset());
1199   const Address size_of_parameters(r2, ConstMethod::
1200                                        size_of_parameters_offset());
1201 
1202   // get parameter size (always needed)
1203   __ ldr(r2, constMethod);
1204   __ load_unsigned_short(r2, size_of_parameters);
1205 
1206   // Native calls don't need the stack size check since they have no
1207   // expression stack and the arguments are already on the stack and
1208   // we only add a handful of words to the stack.
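To make the ldr/ldrh pair above concrete: the parameter count lives in the ConstMethod, so the generated code first loads the ConstMethod* out of the Method* and then reads the unsigned 16-bit size_of_parameters field. A hedged sketch with illustrative struct layouts (not the real Method/ConstMethod definitions):

    #include <cstdint>

    struct ConstMethodSketch { uint16_t size_of_parameters; /* ... */ };
    struct MethodSketch      { ConstMethodSketch* const_method; /* ... */ };

    static unsigned parameter_size_in_words(const MethodSketch* m) {
      // ldr  r2, [rmethod, #const_offset]          -> ConstMethod*
      // ldrh r2, [r2, #size_of_parameters_offset]  -> zero-extended 16-bit count
      return m->const_method->size_of_parameters;
    }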

1287   }
1288 
1289   // start execution
1290 #ifdef ASSERT
1291   {
1292     Label L;
1293     const Address monitor_block_top(rfp,
1294                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1295     __ ldr(rscratch1, monitor_block_top);
1296     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1297     __ cmp(esp, rscratch1);
1298     __ br(Assembler::EQ, L);
1299     __ stop("broken stack frame setup in interpreter 1");
1300     __ bind(L);
1301   }
1302 #endif
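The ASSERT block above checks that, before any monitors have been pushed, the expression stack pointer coincides with the monitor block top recorded in the frame. A rough C++ sketch of that invariant check; the word-indexed frame layout here is illustrative, the real constants come from frame:: and Interpreter::.

    #include <cassert>
    #include <cstdint>

    static void verify_initial_frame(intptr_t* fp, intptr_t* esp,
                                     int monitor_block_top_offset_in_words,
                                     int log_stack_element_size) {
      // The frame stores monitor_block_top as a word index relative to fp.
      intptr_t index = fp[monitor_block_top_offset_in_words];
      intptr_t* expected = (intptr_t*)((char*)fp + (index << log_stack_element_size));
      assert(esp == expected && "broken stack frame setup in interpreter");
    }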
1303 
1304   // jvmti support
1305   __ notify_method_entry();
1306 




1307   // work registers
1308   const Register t = r17;
1309   const Register result_handler = r19;
1310 
1311   // allocate space for parameters
1312   __ ldr(t, Address(rmethod, Method::const_offset()));
1313   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1314 
1315   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1316   __ andr(sp, rscratch1, -16);
1317   __ mov(esp, rscratch1);
1318 
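The three instructions above reserve one expression-stack slot per parameter and keep the two stack pointers consistent: esp tracks the exact new top, while the machine sp is rounded down to the 16-byte alignment the AArch64 ABI requires. A small sketch of the arithmetic (names are illustrative):

    #include <cstdint>

    static void reserve_parameter_space(uintptr_t& sp, uintptr_t& esp,
                                        unsigned param_count, unsigned log_stack_element_size) {
      uintptr_t new_top = esp - ((uintptr_t)param_count << log_stack_element_size);  // sub ..., uxtx #log
      sp  = new_top & ~(uintptr_t)15;  // andr sp, rscratch1, -16: keep machine SP 16-byte aligned
      esp = new_top;                   // mov esp, rscratch1: esp keeps the exact slot boundary
    }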
1319   // get signature handler
1320   {
1321     Label L;
1322     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1323     __ cbnz(t, L);
1324     __ call_VM(noreg,
1325                CAST_FROM_FN_PTR(address,
1326                                 InterpreterRuntime::prepare_native_call),

1616   // remove frame anchor
1617   __ leave();
1618 
1619   // restore sender sp
1620   __ mov(sp, esp);
1621 
1622   __ ret(lr);
1623 
1624   if (inc_counter) {
1625     // Handle overflow of counter and compile method
1626     __ bind(invocation_counter_overflow);
1627     generate_counter_overflow(continue_after_compile);
1628   }
1629 
1630   return entry_point;
1631 }
1632 
1633 //
1634 // Generic interpreted method entry to (asm) interpreter
1635 //
1636 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1637   // determine code generation flags
1638   bool inc_counter  = UseCompiler || CountCompiledCalls;
1639 
1640   // rscratch1: sender sp
1641   address entry_point = __ pc();
1642 
1643   const Address constMethod(rmethod, Method::const_offset());
1644   const Address access_flags(rmethod, Method::access_flags_offset());
1645   const Address size_of_parameters(r3,
1646                                    ConstMethod::size_of_parameters_offset());
1647   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1648 
1649   // get parameter size (always needed)
1650   // need to load the const method first
1651   __ ldr(r3, constMethod);
1652   __ load_unsigned_short(r2, size_of_parameters);
1653 
1654   // r2: size of parameters
1655 
1656   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words

1761   }
1762 
1763   // start execution
1764 #ifdef ASSERT
1765   {
1766     Label L;
1767      const Address monitor_block_top (rfp,
1768                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1769     __ ldr(rscratch1, monitor_block_top);
1770     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1771     __ cmp(esp, rscratch1);
1772     __ br(Assembler::EQ, L);
1773     __ stop("broken stack frame setup in interpreter 2");
1774     __ bind(L);
1775   }
1776 #endif
1777 
1778   // jvmti support
1779   __ notify_method_entry();
1780 





1781   __ dispatch_next(vtos);
1782 
1783   // invocation counter overflow
1784   if (inc_counter) {
1785     // Handle overflow of counter and compile method
1786     __ bind(invocation_counter_overflow);
1787     generate_counter_overflow(continue_after_compile);
1788   }
1789 
1790   return entry_point;
1791 }
1792 
1793 // Method entry for java.lang.Thread.currentThread
1794 address TemplateInterpreterGenerator::generate_currentThread() {
1795   address entry_point = __ pc();
1796 
1797   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1798   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1799   __ ret(lr);
1800 
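generate_currentThread() above is the frameless intrinsic entry for java.lang.Thread.currentThread(): it loads the JavaThread's vthread OopHandle and resolves it into r0. A hedged sketch of that dereference chain with illustrative types:

    // Illustrative stand-ins; not HotSpot's OopHandle/JavaThread definitions.
    struct OopHandleSketch  { void** obj; void* resolve() const { return *obj; } };
    struct JavaThreadSketch { OopHandleSketch vthread; };

    static void* current_thread_entry(JavaThreadSketch* rthread) {
      // ldr r0, [rthread, #vthread_offset]; resolve_oop_handle(r0, ...); ret lr
      return rthread->vthread.resolve();
    }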

2062       __ push_ptr();
2063       __ b(L);
2064   fep = __ pc();     // ftos entry point
2065       __ push_f();
2066       __ b(L);
2067   dep = __ pc();     // dtos entry point
2068       __ push_d();
2069       __ b(L);
2070   lep = __ pc();     // ltos entry point
2071       __ push_l();
2072       __ b(L);
2073   bep = cep = sep = iep = __ pc();     // [bcsi]tos entry point
2074       __ push_i();
2075   vep = __ pc();     // vtos entry point
2076   __ bind(L);
2077   generate_and_dispatch(t);
2078 }
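The code above emits one entry point per top-of-stack state: each entry spills the value currently cached in registers onto the expression stack and then falls into the common vtos label L, which dispatches via generate_and_dispatch(t). A self-contained sketch of that fan-in (the push_* stubs stand in for the macro-assembler calls):

    #include <cstdio>

    static void push_ptr() { std::puts("push reference"); }
    static void push_f()   { std::puts("push float"); }
    static void push_d()   { std::puts("push double"); }
    static void push_l()   { std::puts("push long"); }
    static void push_i()   { std::puts("push int (also byte/char/short)"); }
    static void generate_and_dispatch() { std::puts("dispatch next bytecode"); }

    enum Tos { atos, ftos, dtos, ltos, btos, ctos, stos, itos, vtos };

    static void enter(Tos t) {
      switch (t) {
        case atos: push_ptr(); break;                 // aep
        case ftos: push_f();   break;                 // fep
        case dtos: push_d();   break;                 // dep
        case ltos: push_l();   break;                 // lep
        case btos: case ctos: case stos:
        case itos: push_i();   break;                 // bep == cep == sep == iep
        case vtos: break;                             // vep: nothing cached, nothing to push
      }
      generate_and_dispatch();                        // common label L
    }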
2079 
2080 //-----------------------------------------------------------------------------
2081 


















2082 // Non-product code
2083 #ifndef PRODUCT
2084 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2085   address entry = __ pc();
2086 
2087   __ protect_return_address();
2088   __ push(lr);
2089   __ push(state);
2090   __ push(RegSet::range(r0, r15), sp);
2091   __ mov(c_rarg2, r0);  // Pass itos
2092   __ call_VM(noreg,
2093              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2094              c_rarg1, c_rarg2, c_rarg3);
2095   __ pop(RegSet::range(r0, r15), sp);
2096   __ pop(state);
2097   __ pop(lr);
2098   __ authenticate_return_address();
2099   __ ret(lr);                                   // return from result handler
2100 
2101   return entry;
2102 }
2103 
2104 void TemplateInterpreterGenerator::count_bytecode() {
2105   __ mov(r10, (address) &BytecodeCounter::_counter_value);
2106   __ atomic_addw(noreg, 1, r10);
2107 }
2108 
2109 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2110   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2111   __ atomic_addw(noreg, 1, r10);
2112 }
2113 
2114 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2115   // Calculate new index for counter:
2116   //   _index = (_index >> log2_number_of_codes) |
2117   //            (bytecode << log2_number_of_codes);
2118   Register index_addr = rscratch1;
2119   Register index = rscratch2;
2120   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2121   __ ldrw(index, index_addr);
2122   __ mov(r10,
2123          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2124   __ orrw(index, r10, index, Assembler::LSR,
2125           BytecodePairHistogram::log2_number_of_codes);
2126   __ strw(index, index_addr);
2127 
2128   // Bump bucket contents:
2129   //   _counters[_index] ++;
2130   Register counter_addr = rscratch1;
2131   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2132   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2133   __ atomic_addw(noreg, 1, counter_addr);

1168 
1169   for (int p = 1; p <= n_shadow_pages; p++) {
1170     __ sub(rscratch2, sp, p*page_size);
1171     __ str(zr, Address(rscratch2));
1172   }
1173 
1174   // Record the new watermark, but only if the update is above the safe limit.
1175   // Otherwise, the next time around the check above would pass the safe limit.
1176   __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1177   __ cmp(sp, rscratch1);
1178   __ br(Assembler::LS, L_done);
1179   __ mov(rscratch1, sp);
1180   __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1181 
1182   __ bind(L_done);
1183 }
1184 
1185 // Interpreter stub for calling a native method. (asm interpreter)
1186 // This sets up a somewhat different looking stack for calling the
1187 // native method than the typical interpreter frame setup.
1188 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
1189   // determine code generation flags
1190   bool inc_counter  = UseCompiler || CountCompiledCalls;
1191 
1192   // r1: Method*
1193   // rscratch1: sender sp
1194 
1195   address entry_point = __ pc();
1196 
1197   const Address constMethod       (rmethod, Method::const_offset());
1198   const Address access_flags      (rmethod, Method::access_flags_offset());
1199   const Address size_of_parameters(r2, ConstMethod::
1200                                        size_of_parameters_offset());
1201 
1202   // get parameter size (always needed)
1203   __ ldr(r2, constMethod);
1204   __ load_unsigned_short(r2, size_of_parameters);
1205 
1206   // Native calls don't need the stack size check since they have no
1207   // expression stack and the arguments are already on the stack and
1208   // we only add a handful of words to the stack.

1287   }
1288 
1289   // start execution
1290 #ifdef ASSERT
1291   {
1292     Label L;
1293     const Address monitor_block_top(rfp,
1294                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1295     __ ldr(rscratch1, monitor_block_top);
1296     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1297     __ cmp(esp, rscratch1);
1298     __ br(Assembler::EQ, L);
1299     __ stop("broken stack frame setup in interpreter 1");
1300     __ bind(L);
1301   }
1302 #endif
1303 
1304   // jvmti support
1305   __ notify_method_entry();
1306 
1307   if (runtime_upcalls) {
1308     __ generate_runtime_upcalls_on_method_entry();
1309   }
1310 
1311   // work registers
1312   const Register t = r17;
1313   const Register result_handler = r19;
1314 
1315   // allocate space for parameters
1316   __ ldr(t, Address(rmethod, Method::const_offset()));
1317   __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1318 
1319   __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1320   __ andr(sp, rscratch1, -16);
1321   __ mov(esp, rscratch1);
1322 
1323   // get signature handler
1324   {
1325     Label L;
1326     __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1327     __ cbnz(t, L);
1328     __ call_VM(noreg,
1329                CAST_FROM_FN_PTR(address,
1330                                 InterpreterRuntime::prepare_native_call),

1620   // remove frame anchor
1621   __ leave();
1622 
1623   // restore sender sp
1624   __ mov(sp, esp);
1625 
1626   __ ret(lr);
1627 
1628   if (inc_counter) {
1629     // Handle overflow of counter and compile method
1630     __ bind(invocation_counter_overflow);
1631     generate_counter_overflow(continue_after_compile);
1632   }
1633 
1634   return entry_point;
1635 }
1636 
1637 //
1638 // Generic interpreted method entry to (asm) interpreter
1639 //
1640 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1641   // determine code generation flags
1642   bool inc_counter  = UseCompiler || CountCompiledCalls;
1643 
1644   // rscratch1: sender sp
1645   address entry_point = __ pc();
1646 
1647   const Address constMethod(rmethod, Method::const_offset());
1648   const Address access_flags(rmethod, Method::access_flags_offset());
1649   const Address size_of_parameters(r3,
1650                                    ConstMethod::size_of_parameters_offset());
1651   const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1652 
1653   // get parameter size (always needed)
1654   // need to load the const method first
1655   __ ldr(r3, constMethod);
1656   __ load_unsigned_short(r2, size_of_parameters);
1657 
1658   // r2: size of parameters
1659 
1660   __ load_unsigned_short(r3, size_of_locals); // get size of locals in words

1765   }
1766 
1767   // start execution
1768 #ifdef ASSERT
1769   {
1770     Label L;
1771      const Address monitor_block_top (rfp,
1772                  frame::interpreter_frame_monitor_block_top_offset * wordSize);
1773     __ ldr(rscratch1, monitor_block_top);
1774     __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1775     __ cmp(esp, rscratch1);
1776     __ br(Assembler::EQ, L);
1777     __ stop("broken stack frame setup in interpreter 2");
1778     __ bind(L);
1779   }
1780 #endif
1781 
1782   // jvmti support
1783   __ notify_method_entry();
1784 
1785   // runtime upcalls
1786   if (runtime_upcalls) {
1787     __ generate_runtime_upcalls_on_method_entry();
1788   }
1789 
1790   __ dispatch_next(vtos);
1791 
1792   // invocation counter overflow
1793   if (inc_counter) {
1794     // Handle overflow of counter and compile method
1795     __ bind(invocation_counter_overflow);
1796     generate_counter_overflow(continue_after_compile);
1797   }
1798 
1799   return entry_point;
1800 }
1801 
1802 // Method entry for java.lang.Thread.currentThread
1803 address TemplateInterpreterGenerator::generate_currentThread() {
1804   address entry_point = __ pc();
1805 
1806   __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1807   __ resolve_oop_handle(r0, rscratch1, rscratch2);
1808   __ ret(lr);
1809 

2071       __ push_ptr();
2072       __ b(L);
2073   fep = __ pc();     // ftos entry point
2074       __ push_f();
2075       __ b(L);
2076   dep = __ pc();     // dtos entry point
2077       __ push_d();
2078       __ b(L);
2079   lep = __ pc();     // ltos entry point
2080       __ push_l();
2081       __ b(L);
2082   bep = cep = sep = iep = __ pc();     // [bcsi]tos entry point
2083       __ push_i();
2084   vep = __ pc();     // vtos entry point
2085   __ bind(L);
2086   generate_and_dispatch(t);
2087 }
2088 
2089 //-----------------------------------------------------------------------------
2090 
2091 void TemplateInterpreterGenerator::count_bytecode() {
2092   if (CountBytecodesPerThread) {
2093     Address bc_counter_addr(rthread, Thread::bc_counter_offset());
2094     __ ldr(r10, bc_counter_addr);
2095     __ add(r10, r10, 1);
2096     __ str(r10, bc_counter_addr);
2097   }
2098   if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
2099     __ mov(r10, (address) &BytecodeCounter::_counter_value);
2100     __ atomic_add(noreg, 1, r10);
2101   }
2102 }
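In the updated count_bytecode() above, the per-thread counter is bumped with a plain load/add/store (it is only ever touched by its own thread), while the global BytecodeCounter value is updated with a 64-bit atomic add. A hedged equivalent in plain C++ (types and names are illustrative):

    #include <atomic>
    #include <cstdint>

    struct ThreadCounters { uint64_t bc_counter; };           // per-thread, so no atomics needed
    static std::atomic<uint64_t> global_bytecode_counter{0};  // BytecodeCounter::_counter_value analogue

    static void count_bytecode(ThreadCounters* self,
                               bool count_per_thread, bool count_global) {
      if (count_per_thread) {
        self->bc_counter += 1;                                // ldr/add/str on the rthread field
      }
      if (count_global) {
        global_bytecode_counter.fetch_add(1, std::memory_order_relaxed);  // atomic_add
      }
    }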
2103 
2104 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2105   __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2106   __ atomic_addw(noreg, 1, r10);
2107 }
2108 
2109 // Non-product code
2110 #ifndef PRODUCT
2111 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2112   address entry = __ pc();
2113 
2114   __ protect_return_address();
2115   __ push(lr);
2116   __ push(state);
2117   __ push(RegSet::range(r0, r15), sp);
2118   __ mov(c_rarg2, r0);  // Pass itos
2119   __ call_VM(noreg,
2120              CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2121              c_rarg1, c_rarg2, c_rarg3);
2122   __ pop(RegSet::range(r0, r15), sp);
2123   __ pop(state);
2124   __ pop(lr);
2125   __ authenticate_return_address();
2126   __ ret(lr);                                   // return from result handler
2127 
2128   return entry;
2129 }
2130 










2131 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2132   // Calculate new index for counter:
2133   //   _index = (_index >> log2_number_of_codes) |
2134   //            (bytecode << log2_number_of_codes);
2135   Register index_addr = rscratch1;
2136   Register index = rscratch2;
2137   __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2138   __ ldrw(index, index_addr);
2139   __ mov(r10,
2140          ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2141   __ orrw(index, r10, index, Assembler::LSR,
2142           BytecodePairHistogram::log2_number_of_codes);
2143   __ strw(index, index_addr);
2144 
2145   // Bump bucket contents:
2146   //   _counters[_index] ++;
2147   Register counter_addr = rscratch1;
2148   __ mov(r10, (address) &BytecodePairHistogram::_counters);
2149   __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2150   __ atomic_addw(noreg, 1, counter_addr);
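histogram_bytecode_pair() above maintains a rolling index whose low bits hold the previous bytecode and whose high bits hold the current one, so _counters[_index] ends up counting (previous, current) bytecode pairs. A hedged sketch of the same update; log2_number_of_codes is set to 8 here for illustration, and the plain increment stands in for the atomic_addw used by the generated code:

    #include <cstdint>

    constexpr int log2_number_of_codes = 8;                  // illustrative: room for all 256 bytecodes
    constexpr int number_of_pairs      = 1 << (2 * log2_number_of_codes);

    static uint32_t pair_index;                              // BytecodePairHistogram::_index analogue
    static uint32_t pair_counters[number_of_pairs];          // BytecodePairHistogram::_counters analogue

    static void histogram_bytecode_pair(int bytecode) {
      pair_index = (pair_index >> log2_number_of_codes) |
                   ((uint32_t)bytecode << log2_number_of_codes);  // orrw index, r10, index, LSR #log2
      pair_counters[pair_index] += 1;                             // atomic_addw in the generated code
    }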