1172
1173 for (int p = 1; p <= n_shadow_pages; p++) {
1174 __ sub(rscratch2, sp, p*page_size);
1175 __ str(zr, Address(rscratch2));
1176 }
1177
1178 // Record the new watermark, but only if the update is above the safe limit.
1179 // Otherwise, the next time around the check above would pass the safe limit.
1180 __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1181 __ cmp(sp, rscratch1);
1182 __ br(Assembler::LS, L_done);
1183 __ mov(rscratch1, sp);
1184 __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1185
1186 __ bind(L_done);
1187 }
1188
1189 // Interpreter stub for calling a native method. (asm interpreter)
1190 // This sets up a somewhat different looking stack for calling the
1191 // native method than the typical interpreter frame setup.
1192 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized) {
1193 // determine code generation flags
1194 bool inc_counter = UseCompiler || CountCompiledCalls;
1195
1196 // r1: Method*
1197 // rscratch1: sender sp
1198
1199 address entry_point = __ pc();
1200
1201 const Address constMethod (rmethod, Method::const_offset());
1202 const Address access_flags (rmethod, Method::access_flags_offset());
1203 const Address size_of_parameters(r2, ConstMethod::
1204 size_of_parameters_offset());
1205
1206 // get parameter size (always needed)
1207 __ ldr(r2, constMethod);
1208 __ load_unsigned_short(r2, size_of_parameters);
1209
1210 // Native calls don't need the stack size check since they have no
1211 // expression stack and the arguments are already on the stack and
1212 // we only add a handful of words to the stack.
1213
1214 // rmethod: Method*
1291 }
1292
1293 // start execution
1294 #ifdef ASSERT
1295 {
1296 Label L;
1297 const Address monitor_block_top(rfp,
1298 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1299 __ ldr(rscratch1, monitor_block_top);
1300 __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1301 __ cmp(esp, rscratch1);
1302 __ br(Assembler::EQ, L);
1303 __ stop("broken stack frame setup in interpreter 1");
1304 __ bind(L);
1305 }
1306 #endif
1307
1308 // jvmti support
1309 __ notify_method_entry();
1310
1311 // work registers
1312 const Register t = r17;
1313 const Register result_handler = r19;
1314
1315 // allocate space for parameters
1316 __ ldr(t, Address(rmethod, Method::const_offset()));
1317 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1318
1319 __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1320 __ andr(sp, rscratch1, -16);
1321 __ mov(esp, rscratch1);
1322
1323 // get signature handler
1324 {
1325 Label L;
1326 __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1327 __ cbnz(t, L);
1328 __ call_VM(noreg,
1329 CAST_FROM_FN_PTR(address,
1330 InterpreterRuntime::prepare_native_call),
1640
1641 JFR_ONLY(__ leave_jfr_critical_section();)
1642
1643 // restore sender sp
1644 __ mov(sp, esp);
1645
1646 __ ret(lr);
1647
1648 if (inc_counter) {
1649 // Handle overflow of counter and compile method
1650 __ bind(invocation_counter_overflow);
1651 generate_counter_overflow(continue_after_compile);
1652 }
1653
1654 return entry_point;
1655 }
1656
1657 //
1658 // Generic interpreted method entry to (asm) interpreter
1659 //
1660 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1661 // determine code generation flags
1662 bool inc_counter = UseCompiler || CountCompiledCalls;
1663
1664 // rscratch1: sender sp
1665 address entry_point = __ pc();
1666
1667 const Address constMethod(rmethod, Method::const_offset());
1668 const Address access_flags(rmethod, Method::access_flags_offset());
1669 const Address size_of_parameters(r3,
1670 ConstMethod::size_of_parameters_offset());
1671 const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1672
1673 // get parameter size (always needed)
1674 // need to load the const method first
1675 __ ldr(r3, constMethod);
1676 __ load_unsigned_short(r2, size_of_parameters);
1677
1678 // r2: size of parameters
1679
1680 __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1681 __ sub(r3, r3, r2); // r3 = no. of additional locals
1682
1785 }
1786
1787 // start execution
1788 #ifdef ASSERT
1789 {
1790 Label L;
1791 const Address monitor_block_top (rfp,
1792 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1793 __ ldr(rscratch1, monitor_block_top);
1794 __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1795 __ cmp(esp, rscratch1);
1796 __ br(Assembler::EQ, L);
1797 __ stop("broken stack frame setup in interpreter 2");
1798 __ bind(L);
1799 }
1800 #endif
1801
1802 // jvmti support
1803 __ notify_method_entry();
1804
1805 __ dispatch_next(vtos);
1806
1807 // invocation counter overflow
1808 if (inc_counter) {
1809 // Handle overflow of counter and compile method
1810 __ bind(invocation_counter_overflow);
1811 generate_counter_overflow(continue_after_compile);
1812 }
1813
1814 return entry_point;
1815 }
1816
1817 // Method entry for java.lang.Thread.currentThread
1818 address TemplateInterpreterGenerator::generate_currentThread() {
1819 address entry_point = __ pc();
1820
1821 __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1822 __ resolve_oop_handle(r0, rscratch1, rscratch2);
1823 __ ret(lr);
1824
2102 #ifndef PRODUCT
// Debug-only (#ifndef PRODUCT) stub: emits code that makes a VM upcall to
// InterpreterRuntime::trace_bytecode so each executed bytecode can be traced.
// All interpreter-visible state -- lr, the current tos value (pushed per
// `state`), and r0-r15 -- is saved around the call and restored afterwards,
// so the stub is transparent to the interpreter.
2103 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2104 address entry = __ pc();
2105
// NOTE(review): protect/authenticate look like PAC return-address signing
// around the spill/reload of lr -- confirm against MacroAssembler.
2106 __ protect_return_address();
2107 __ push(lr); // preserve return address across the VM call
2108 __ push(state); // spill the tos-cached value for this TosState
2109 __ push(RegSet::range(r0, r15), sp); // save all caller-saved GPRs
2110 __ mov(c_rarg2, r0); // Pass itos
2111 __ call_VM(noreg,
2112 CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2113 c_rarg1, c_rarg2, c_rarg3);
2114 __ pop(RegSet::range(r0, r15), sp); // restore GPRs in reverse order
2115 __ pop(state);
2116 __ pop(lr);
2117 __ authenticate_return_address();
2118 __ ret(lr); // return from result handler
2119
2120 return entry;
2121 }
2122
// Emits code that atomically bumps the global bytecode execution counter
// (BytecodeCounter::_counter_value). The counter's address is materialized
// into r10 (scratch); the previous value is discarded (noreg destination).
// Atomic because multiple interpreter threads share the one global counter.
2123 void TemplateInterpreterGenerator::count_bytecode() {
2124 __ mov(r10, (address) &BytecodeCounter::_counter_value);
2125 __ atomic_add(noreg, 1, r10);
2126 }
2127
// Emits code that atomically increments the histogram bucket for this
// template's bytecode. The bucket address is a compile-time constant
// (&_counters[t->bytecode()]) materialized into r10. Uses the 32-bit
// atomic add (addw) -- presumably the counters are 32-bit; confirm against
// BytecodeHistogram::_counters' element type.
2128 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2129 __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2130 __ atomic_addw(noreg, 1, r10);
2131 }
2132
2133 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2134 // Calculate new index for counter:
2135 // _index = (_index >> log2_number_of_codes) |
2136 // (bytecode << log2_number_of_codes);
2137 Register index_addr = rscratch1;
2138 Register index = rscratch2;
2139 __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2140 __ ldrw(index, index_addr);
2141 __ mov(r10,
2142 ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2143 __ orrw(index, r10, index, Assembler::LSR,
2144 BytecodePairHistogram::log2_number_of_codes);
2145 __ strw(index, index_addr);
2146
2147 // Bump bucket contents:
2148 // _counters[_index] ++;
2149 Register counter_addr = rscratch1;
2150 __ mov(r10, (address) &BytecodePairHistogram::_counters);
2151 __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2152 __ atomic_addw(noreg, 1, counter_addr);
|
1172
1173 for (int p = 1; p <= n_shadow_pages; p++) {
1174 __ sub(rscratch2, sp, p*page_size);
1175 __ str(zr, Address(rscratch2));
1176 }
1177
1178 // Record the new watermark, but only if the update is above the safe limit.
1179 // Otherwise, the next time around the check above would pass the safe limit.
1180 __ ldr(rscratch1, Address(rthread, JavaThread::shadow_zone_safe_limit()));
1181 __ cmp(sp, rscratch1);
1182 __ br(Assembler::LS, L_done);
1183 __ mov(rscratch1, sp);
1184 __ str(rscratch1, Address(rthread, JavaThread::shadow_zone_growth_watermark()));
1185
1186 __ bind(L_done);
1187 }
1188
1189 // Interpreter stub for calling a native method. (asm interpreter)
1190 // This sets up a somewhat different looking stack for calling the
1191 // native method than the typical interpreter frame setup.
1192 address TemplateInterpreterGenerator::generate_native_entry(bool synchronized, bool runtime_upcalls) {
1193 // determine code generation flags
1194 bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1195
1196 // r1: Method*
1197 // rscratch1: sender sp
1198
1199 address entry_point = __ pc();
1200
1201 const Address constMethod (rmethod, Method::const_offset());
1202 const Address access_flags (rmethod, Method::access_flags_offset());
1203 const Address size_of_parameters(r2, ConstMethod::
1204 size_of_parameters_offset());
1205
1206 // get parameter size (always needed)
1207 __ ldr(r2, constMethod);
1208 __ load_unsigned_short(r2, size_of_parameters);
1209
1210 // Native calls don't need the stack size check since they have no
1211 // expression stack and the arguments are already on the stack and
1212 // we only add a handful of words to the stack.
1213
1214 // rmethod: Method*
1291 }
1292
1293 // start execution
1294 #ifdef ASSERT
1295 {
1296 Label L;
1297 const Address monitor_block_top(rfp,
1298 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1299 __ ldr(rscratch1, monitor_block_top);
1300 __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1301 __ cmp(esp, rscratch1);
1302 __ br(Assembler::EQ, L);
1303 __ stop("broken stack frame setup in interpreter 1");
1304 __ bind(L);
1305 }
1306 #endif
1307
1308 // jvmti support
1309 __ notify_method_entry();
1310
1311 if (runtime_upcalls) {
1312 __ generate_runtime_upcalls_on_method_entry();
1313 }
1314
1315 // work registers
1316 const Register t = r17;
1317 const Register result_handler = r19;
1318
1319 // allocate space for parameters
1320 __ ldr(t, Address(rmethod, Method::const_offset()));
1321 __ load_unsigned_short(t, Address(t, ConstMethod::size_of_parameters_offset()));
1322
1323 __ sub(rscratch1, esp, t, ext::uxtx, Interpreter::logStackElementSize);
1324 __ andr(sp, rscratch1, -16);
1325 __ mov(esp, rscratch1);
1326
1327 // get signature handler
1328 {
1329 Label L;
1330 __ ldr(t, Address(rmethod, Method::signature_handler_offset()));
1331 __ cbnz(t, L);
1332 __ call_VM(noreg,
1333 CAST_FROM_FN_PTR(address,
1334 InterpreterRuntime::prepare_native_call),
1644
1645 JFR_ONLY(__ leave_jfr_critical_section();)
1646
1647 // restore sender sp
1648 __ mov(sp, esp);
1649
1650 __ ret(lr);
1651
1652 if (inc_counter) {
1653 // Handle overflow of counter and compile method
1654 __ bind(invocation_counter_overflow);
1655 generate_counter_overflow(continue_after_compile);
1656 }
1657
1658 return entry_point;
1659 }
1660
1661 //
1662 // Generic interpreted method entry to (asm) interpreter
1663 //
1664 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool runtime_upcalls) {
1665 // determine code generation flags
1666 bool inc_counter = (UseCompiler || CountCompiledCalls) && !PreloadOnly;
1667
1668 // rscratch1: sender sp
1669 address entry_point = __ pc();
1670
1671 const Address constMethod(rmethod, Method::const_offset());
1672 const Address access_flags(rmethod, Method::access_flags_offset());
1673 const Address size_of_parameters(r3,
1674 ConstMethod::size_of_parameters_offset());
1675 const Address size_of_locals(r3, ConstMethod::size_of_locals_offset());
1676
1677 // get parameter size (always needed)
1678 // need to load the const method first
1679 __ ldr(r3, constMethod);
1680 __ load_unsigned_short(r2, size_of_parameters);
1681
1682 // r2: size of parameters
1683
1684 __ load_unsigned_short(r3, size_of_locals); // get size of locals in words
1685 __ sub(r3, r3, r2); // r3 = no. of additional locals
1686
1789 }
1790
1791 // start execution
1792 #ifdef ASSERT
1793 {
1794 Label L;
1795 const Address monitor_block_top (rfp,
1796 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1797 __ ldr(rscratch1, monitor_block_top);
1798 __ lea(rscratch1, Address(rfp, rscratch1, Address::lsl(Interpreter::logStackElementSize)));
1799 __ cmp(esp, rscratch1);
1800 __ br(Assembler::EQ, L);
1801 __ stop("broken stack frame setup in interpreter 2");
1802 __ bind(L);
1803 }
1804 #endif
1805
1806 // jvmti support
1807 __ notify_method_entry();
1808
1809 // runtime upcalls
1810 if (runtime_upcalls) {
1811 __ generate_runtime_upcalls_on_method_entry();
1812 }
1813
1814 __ dispatch_next(vtos);
1815
1816 // invocation counter overflow
1817 if (inc_counter) {
1818 // Handle overflow of counter and compile method
1819 __ bind(invocation_counter_overflow);
1820 generate_counter_overflow(continue_after_compile);
1821 }
1822
1823 return entry_point;
1824 }
1825
1826 // Method entry for java.lang.Thread.currentThread
1827 address TemplateInterpreterGenerator::generate_currentThread() {
1828 address entry_point = __ pc();
1829
1830 __ ldr(r0, Address(rthread, JavaThread::vthread_offset()));
1831 __ resolve_oop_handle(r0, rscratch1, rscratch2);
1832 __ ret(lr);
1833
2111 #ifndef PRODUCT
// Debug-only (#ifndef PRODUCT) stub: emits code that makes a VM upcall to
// InterpreterRuntime::trace_bytecode so each executed bytecode can be traced.
// All interpreter-visible state -- lr, the current tos value (pushed per
// `state`), and r0-r15 -- is saved around the call and restored afterwards,
// so the stub is transparent to the interpreter.
2112 address TemplateInterpreterGenerator::generate_trace_code(TosState state) {
2113 address entry = __ pc();
2114
// NOTE(review): protect/authenticate look like PAC return-address signing
// around the spill/reload of lr -- confirm against MacroAssembler.
2115 __ protect_return_address();
2116 __ push(lr); // preserve return address across the VM call
2117 __ push(state); // spill the tos-cached value for this TosState
2118 __ push(RegSet::range(r0, r15), sp); // save all caller-saved GPRs
2119 __ mov(c_rarg2, r0); // Pass itos
2120 __ call_VM(noreg,
2121 CAST_FROM_FN_PTR(address, InterpreterRuntime::trace_bytecode),
2122 c_rarg1, c_rarg2, c_rarg3);
2123 __ pop(RegSet::range(r0, r15), sp); // restore GPRs in reverse order
2124 __ pop(state);
2125 __ pop(lr);
2126 __ authenticate_return_address();
2127 __ ret(lr); // return from result handler
2128
2129 return entry;
2130 }
2131 #endif // PRODUCT
2132
// Emits code that bumps the bytecode execution counter(s), gated on flags:
//  - CountBytecodesPerThread: increment a per-thread counter stored in the
//    current JavaThread (rthread + Thread::bc_counter_offset()). Plain
//    ldr/add/str -- presumably only the owning thread touches this slot, so
//    no atomicity is required; confirm against Thread::bc_counter users.
//  - CountBytecodes / TraceBytecodes / StopInterpreterAt: atomically
//    increment the global BytecodeCounter::_counter_value, shared by all
//    interpreter threads. r10 is used as scratch in both paths; the old
//    value is discarded (noreg destination).
2133 void TemplateInterpreterGenerator::count_bytecode() {
2134 if (CountBytecodesPerThread) {
2135 Address bc_counter_addr(rthread, Thread::bc_counter_offset());
2136 __ ldr(r10, bc_counter_addr);
2137 __ add(r10, r10, 1);
2138 __ str(r10, bc_counter_addr);
2139 }
2140 if (CountBytecodes || TraceBytecodes || StopInterpreterAt > 0) {
2141 __ mov(r10, (address) &BytecodeCounter::_counter_value);
2142 __ atomic_add(noreg, 1, r10);
2143 }
2144 }
2145
// Emits code that atomically increments the histogram bucket for this
// template's bytecode. The bucket address is a compile-time constant
// (&_counters[t->bytecode()]) materialized into r10. Uses the 32-bit
// atomic add (addw) -- presumably the counters are 32-bit; confirm against
// BytecodeHistogram::_counters' element type.
2146 void TemplateInterpreterGenerator::histogram_bytecode(Template* t) {
2147 __ mov(r10, (address) &BytecodeHistogram::_counters[t->bytecode()]);
2148 __ atomic_addw(noreg, 1, r10);
2149 }
2150
2151 #ifndef PRODUCT
2152 void TemplateInterpreterGenerator::histogram_bytecode_pair(Template* t) {
2153 // Calculate new index for counter:
2154 // _index = (_index >> log2_number_of_codes) |
2155 // (bytecode << log2_number_of_codes);
2156 Register index_addr = rscratch1;
2157 Register index = rscratch2;
2158 __ mov(index_addr, (address) &BytecodePairHistogram::_index);
2159 __ ldrw(index, index_addr);
2160 __ mov(r10,
2161 ((int)t->bytecode()) << BytecodePairHistogram::log2_number_of_codes);
2162 __ orrw(index, r10, index, Assembler::LSR,
2163 BytecodePairHistogram::log2_number_of_codes);
2164 __ strw(index, index_addr);
2165
2166 // Bump bucket contents:
2167 // _counters[_index] ++;
2168 Register counter_addr = rscratch1;
2169 __ mov(r10, (address) &BytecodePairHistogram::_counters);
2170 __ lea(counter_addr, Address(r10, index, Address::lsl(LogBytesPerInt)));
2171 __ atomic_addw(noreg, 1, counter_addr);
|