21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "compiler/compiler_globals.hpp"
28 #include "compiler/disassembler.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "interpreter/bytecodeHistogram.hpp"
31 #include "interpreter/interp_masm.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/templateInterpreterGenerator.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "oops/arrayOop.hpp"
37 #include "oops/methodCounters.hpp"
38 #include "oops/methodData.hpp"
39 #include "oops/method.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/jvmtiThreadState.hpp"
45 #include "runtime/continuation.hpp"
46 #include "runtime/deoptimization.hpp"
47 #include "runtime/frame.inline.hpp"
48 #include "runtime/globals.hpp"
49 #include "runtime/jniHandles.hpp"
50 #include "runtime/sharedRuntime.hpp"
51 #include "runtime/stubRoutines.hpp"
52 #include "runtime/synchronizer.hpp"
53 #include "runtime/timer.hpp"
54 #include "runtime/vframeArray.hpp"
55 #include "utilities/checkedCast.hpp"
56 #include "utilities/debug.hpp"
57 #include "utilities/macros.hpp"
58
59 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
60
61 // Size of interpreter code. Increase if too small. Interpreter will
62 // fail with a guarantee ("not enough space for interpreter generation")
63 // if too small.
64 // Run with -XX:+PrintInterpreter to get the VM to print out the size.
65 // Max size with JVMTI
66 int TemplateInterpreter::InterpreterCodeSize = JVMCI_ONLY(268) NOT_JVMCI(256) * 1024;
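// Illustrative usage note (assumed command line, not taken from this file):
// PrintInterpreter is a diagnostic flag, so inspecting the generated size
// typically requires unlocking diagnostic options first, e.g.
//
//   java -XX:+UnlockDiagnosticVMOptions -XX:+PrintInterpreter -version
//
// If the guarantee mentioned above ever fires, bump InterpreterCodeSize.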
67
68 // Global Register Names
69 static const Register rbcp = r13;
70 static const Register rlocals = r14;
71
72 const int method_offset = frame::interpreter_frame_method_offset * wordSize;
73 const int bcp_offset = frame::interpreter_frame_bcp_offset * wordSize;
74 const int locals_offset = frame::interpreter_frame_locals_offset * wordSize;
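// Clarifying note: these constants turn interpreter-frame slot indices into byte
// offsets from rbp (the interpreter frame pointer), so they can be used directly
// as address displacements. A minimal sketch of the intended usage, assumed from
// how such offsets are normally consumed in this generator:
//
//   __ movptr(rbx, Address(rbp, method_offset));   // load the current Method*
//   __ movptr(rbcp, Address(rbp, bcp_offset));     // load the saved bytecode pointer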
75
76
77 //-----------------------------------------------------------------------------
78
79 address TemplateInterpreterGenerator::generate_StackOverflowError_handler() {
80 address entry = __ pc();
81
82 #ifdef ASSERT
83 {
84 Label L;
85 __ movptr(rax, Address(rbp,
86 frame::interpreter_frame_monitor_block_top_offset *
159 if (pass_oop) {
160 __ call_VM(rax, CAST_FROM_FN_PTR(address,
161 InterpreterRuntime::
162 create_klass_exception),
163 c_rarg1, c_rarg2);
164 } else {
165 __ lea(c_rarg2, ExternalAddress((address)message));
166 __ call_VM(rax,
167 CAST_FROM_FN_PTR(address, InterpreterRuntime::create_exception),
168 c_rarg1, c_rarg2);
169 }
170 // throw exception
171 __ jump(RuntimeAddress(Interpreter::throw_exception_entry()));
172 return entry;
173 }
174
175 address TemplateInterpreterGenerator::generate_return_entry_for(TosState state, int step, size_t index_size) {
176 address entry = __ pc();
177
178 // Restore the stack bottom in case the i2c adapter adjusted the stack
179 __ movptr(rcx, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
180 __ lea(rsp, Address(rbp, rcx, Address::times_ptr));
181 // and null it as a marker that esp is now tos until the next Java call
182 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
183
184 __ restore_bcp();
185 __ restore_locals();
186
187 if (state == atos) {
188 Register mdp = rbx;
189 Register tmp = rcx;
190 __ profile_return_type(mdp, rax, tmp);
191 }
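// Note (hedged): for a reference return (atos), profile_return_type is expected to
// record the dynamic type of the value in rax into the caller's MethodData, using
// mdp and tmp as scratch, so the JIT can later speculate on the concrete return
// type. Non-reference states carry no klass to record, hence the atos-only guard.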
192
193 const Register cache = rbx;
194 const Register index = rcx;
195 if (index_size == sizeof(u4)) {
196 __ load_resolved_indy_entry(cache, index);
197 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
198 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
199 } else {
200 assert(index_size == sizeof(u2), "Can only be u2");
201 __ load_method_entry(cache, index);
202 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedMethodEntry::num_parameters_offset())));
203 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
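// Note: both branches above pop the callee's arguments. The resolved entry supplies
// the parameter count in stack slots; scaling it by Interpreter::stackElementScale()
// and adding it to rsp removes those slots from the caller's expression stack now
// that the call has returned.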
1212 address entry_point = __ pc();
1213
1214 // abstract method entry
1215
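// Clarifying note (not authoritative): this entry is reached when dispatch selects
// a method that is still abstract at runtime, typically a binary-compatibility
// situation, e.g. a class compiled against an old version of an interface that
// later gained a non-default method; invoking that method on the stale class
// selects no implementation and must raise AbstractMethodError.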
1216 // pop return address, reset last_sp to null
1217 __ empty_expression_stack();
1218 __ restore_bcp(); // rbcp must be correct for the exception handler (it was destroyed)
1219 __ restore_locals(); // make sure the locals pointer is correct as well (it was destroyed)
1220
1221 // throw exception
1222 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1223 // call_VM checks for a pending exception, so we should never return here.
1224 __ should_not_reach_here();
1225
1226 return entry_point;
1227 }
1228
1229 //
1230 // Generic interpreted method entry to (asm) interpreter
1231 //
1232 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
1233 // determine code generation flags
1234 bool inc_counter = UseCompiler || CountCompiledCalls;
1235
1236 // rbx: Method*
1237 // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1238 address entry_point = __ pc();
1239
1240 const Address constMethod(rbx, Method::const_offset());
1241 const Address access_flags(rbx, Method::access_flags_offset());
1242 const Address size_of_parameters(rdx,
1243 ConstMethod::size_of_parameters_offset());
1244 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1245
1246
1247 // get parameter size (always needed)
1248 __ movptr(rdx, constMethod);
1249 __ load_unsigned_short(rcx, size_of_parameters);
1250
1251 // rbx: Method*
1252 // rcx: size of parameters
1333 // check for synchronized methods
1334 // Must happen AFTER the invocation counter check and the stack overflow check,
1335 // so the method is not locked if the counter overflows.
1336 if (synchronized) {
1337 // Allocate monitor and lock method
1338 lock_method();
1339 } else {
1340 // no synchronization necessary
1341 #ifdef ASSERT
1342 {
1343 Label L;
1344 __ load_unsigned_short(rax, access_flags);
1345 __ testl(rax, JVM_ACC_SYNCHRONIZED);
1346 __ jcc(Assembler::zero, L);
1347 __ stop("method needs synchronization");
1348 __ bind(L);
1349 }
1350 #endif
1351 }
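// Note (hedged): lock_method(), defined elsewhere in this generator, is expected to
// allocate the initial monitor slot in the frame's monitor block and then lock the
// synchronization object: the receiver for instance methods, or the class mirror
// for static methods. Deferring this until after the counter and stack checks keeps
// the method unlocked on the paths that leave the method before executing it.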
1352
1353 // start execution
1354 #ifdef ASSERT
1355 {
1356 Label L;
1357 const Address monitor_block_top (rbp,
1358 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1359 __ movptr(rax, monitor_block_top);
1360 __ lea(rax, Address(rbp, rax, Address::times_ptr));
1361 __ cmpptr(rax, rsp);
1362 __ jcc(Assembler::equal, L);
1363 __ stop("broken stack frame setup in interpreter 6");
1364 __ bind(L);
1365 }
1366 #endif
1367
1368 // jvmti support
1369 __ notify_method_entry();
1370
1371 __ dispatch_next(vtos);
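// Note (hedged): dispatch_next(vtos) is expected to load the bytecode at rbcp and
// jump through the vtos dispatch table, i.e. it begins executing the method's first
// bytecode with an empty (void) top-of-stack state.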
1372
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "compiler/compiler_globals.hpp"
28 #include "compiler/disassembler.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "interpreter/bytecodeHistogram.hpp"
31 #include "interpreter/interp_masm.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/templateInterpreterGenerator.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "oops/arrayOop.hpp"
37 #include "oops/methodCounters.hpp"
38 #include "oops/methodData.hpp"
39 #include "oops/method.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/inlineKlass.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/jvmtiThreadState.hpp"
46 #include "runtime/continuation.hpp"
47 #include "runtime/deoptimization.hpp"
48 #include "runtime/frame.inline.hpp"
49 #include "runtime/globals.hpp"
50 #include "runtime/jniHandles.hpp"
51 #include "runtime/sharedRuntime.hpp"
52 #include "runtime/stubRoutines.hpp"
53 #include "runtime/synchronizer.hpp"
54 #include "runtime/timer.hpp"
55 #include "runtime/vframeArray.hpp"
56 #include "utilities/checkedCast.hpp"
57 #include "utilities/debug.hpp"
58 #include "utilities/macros.hpp"
59
60 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
61
62 // Size of interpreter code. Increase if too small. Interpreter will
63 // fail with a guarantee ("not enough space for interpreter generation")
64 // if too small.
65 // Run with -XX:+PrintInterpreter to get the VM to print out the size.
66 // Max size with JVMTI
67 int TemplateInterpreter::InterpreterCodeSize = JVMCI_ONLY(280) NOT_JVMCI(268) * 1024;
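// Note (hedged): JVMCI_ONLY/NOT_JVMCI select the constant by build configuration,
// so JVMCI-enabled builds reserve 280 KB of interpreter code space and other builds
// 268 KB. These values are larger than the sizes in the version above, presumably
// to leave room for the additional inline-type code paths in this variant.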
68
69 // Global Register Names
70 static const Register rbcp = r13;
71 static const Register rlocals = r14;
72
73 const int method_offset = frame::interpreter_frame_method_offset * wordSize;
74 const int bcp_offset = frame::interpreter_frame_bcp_offset * wordSize;
75 const int locals_offset = frame::interpreter_frame_locals_offset * wordSize;
76
77
78 //-----------------------------------------------------------------------------
79
80 address TemplateInterpreterGenerator::generate_StackOverflowError_handler() {
81 address entry = __ pc();
82
83 #ifdef ASSERT
84 {
85 Label L;
86 __ movptr(rax, Address(rbp,
87 frame::interpreter_frame_monitor_block_top_offset *
160 if (pass_oop) {
161 __ call_VM(rax, CAST_FROM_FN_PTR(address,
162 InterpreterRuntime::
163 create_klass_exception),
164 c_rarg1, c_rarg2);
165 } else {
166 __ lea(c_rarg2, ExternalAddress((address)message));
167 __ call_VM(rax,
168 CAST_FROM_FN_PTR(address, InterpreterRuntime::create_exception),
169 c_rarg1, c_rarg2);
170 }
171 // throw exception
172 __ jump(RuntimeAddress(Interpreter::throw_exception_entry()));
173 return entry;
174 }
175
176 address TemplateInterpreterGenerator::generate_return_entry_for(TosState state, int step, size_t index_size) {
177 address entry = __ pc();
178
179 // Restore the stack bottom in case the i2c adapter adjusted the stack
180 __ movptr(rscratch1, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
181 __ lea(rsp, Address(rbp, rscratch1, Address::times_ptr));
182 // and null it as a marker that esp is now tos until the next Java call
183 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
184
185 if (state == atos && InlineTypeReturnedAsFields) {
186 __ store_inline_type_fields_to_buf(nullptr);
187 }
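// Note (hedged): with InlineTypeReturnedAsFields, a callee may return an inline
// type as individual field values in registers rather than as a heap reference.
// The call above is expected to buffer those fields back into a heap-allocated
// value object so that rax again holds an ordinary oop before the interpreter
// continues.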
188
189 __ restore_bcp();
190 __ restore_locals();
191
192 if (state == atos) {
193 Register mdp = rbx;
194 Register tmp = rcx;
195 __ profile_return_type(mdp, rax, tmp);
196 }
197
198 const Register cache = rbx;
199 const Register index = rcx;
200 if (index_size == sizeof(u4)) {
201 __ load_resolved_indy_entry(cache, index);
202 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
203 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
204 } else {
205 assert(index_size == sizeof(u2), "Can only be u2");
206 __ load_method_entry(cache, index);
207 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedMethodEntry::num_parameters_offset())));
208 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
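// Note: a 4-byte (u4) index identifies an invokedynamic call and is resolved
// through the ResolvedIndyEntry table, while all other invokes use a 2-byte (u2)
// index into the ResolvedMethodEntry table; in both cases the entry's parameter
// count drives the argument pop above.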
1217 address entry_point = __ pc();
1218
1219 // abstract method entry
1220
1221 // pop return address, reset last_sp to null
1222 __ empty_expression_stack();
1223 __ restore_bcp(); // rbcp must be correct for the exception handler (it was destroyed)
1224 __ restore_locals(); // make sure the locals pointer is correct as well (it was destroyed)
1225
1226 // throw exception
1227 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodErrorWithMethod), rbx);
1228 // call_VM checks for a pending exception, so we should never return here.
1229 __ should_not_reach_here();
1230
1231 return entry_point;
1232 }
1233
1234 //
1235 // Generic interpreted method entry to (asm) interpreter
1236 //
1237 address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized, bool object_init) {
1238 // determine code generation flags
1239 bool inc_counter = UseCompiler || CountCompiledCalls;
1240
1241 // rbx: Method*
1242 // rbcp: sender sp (set in InterpreterMacroAssembler::prepare_to_jump_from_interpreted / generate_call_stub)
1243 address entry_point = __ pc();
1244
1245 const Address constMethod(rbx, Method::const_offset());
1246 const Address access_flags(rbx, Method::access_flags_offset());
1247 const Address size_of_parameters(rdx,
1248 ConstMethod::size_of_parameters_offset());
1249 const Address size_of_locals(rdx, ConstMethod::size_of_locals_offset());
1250
1251
1252 // get parameter size (always needed)
1253 __ movptr(rdx, constMethod);
1254 __ load_unsigned_short(rcx, size_of_parameters);
1255
1256 // rbx: Method*
1257 // rcx: size of parameters
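// Note (hedged): rdx holds the ConstMethod* and rcx now holds the parameter size in
// stack slots (longs and doubles occupy two slots each); the code elided below is
// expected to use it to locate the incoming arguments and set up rlocals.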
1338 // check for synchronized methods
1339 // Must happen AFTER the invocation counter check and the stack overflow check,
1340 // so the method is not locked if the counter overflows.
1341 if (synchronized) {
1342 // Allocate monitor and lock method
1343 lock_method();
1344 } else {
1345 // no synchronization necessary
1346 #ifdef ASSERT
1347 {
1348 Label L;
1349 __ load_unsigned_short(rax, access_flags);
1350 __ testl(rax, JVM_ACC_SYNCHRONIZED);
1351 __ jcc(Assembler::zero, L);
1352 __ stop("method needs synchronization");
1353 __ bind(L);
1354 }
1355 #endif
1356 }
1357
1358 // If object_init == true, we would insert a StoreStore barrier here to
1359 // prevent the initial default values of strict fields from being observable.
1360 // However, x86 is a TSO platform, so if `this` escapes, the initialized
1361 // values of the strict fields are guaranteed to be the ones observed, and
1362 // the barrier can be elided.
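// Sketch (assumed shape, intentionally not emitted here for the TSO reason above):
// on a port with a weaker memory model the barrier would look something like
//
//   __ membar(Assembler::StoreStore);   // order field initialization before publication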
1363
1364 // start execution
1365 #ifdef ASSERT
1366 {
1367 Label L;
1368 const Address monitor_block_top (rbp,
1369 frame::interpreter_frame_monitor_block_top_offset * wordSize);
1370 __ movptr(rax, monitor_block_top);
1371 __ lea(rax, Address(rbp, rax, Address::times_ptr));
1372 __ cmpptr(rax, rsp);
1373 __ jcc(Assembler::equal, L);
1374 __ stop("broken stack frame setup in interpreter 6");
1375 __ bind(L);
1376 }
1377 #endif
1378
1379 // jvmti support
1380 __ notify_method_entry();
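// Note (hedged): notify_method_entry() is expected to test whether JVMTI
// method-entry events are requested (e.g. the thread runs in interp_only_mode) and
// only then call into the runtime to post the event, keeping the common path cheap.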
1381
1382 __ dispatch_next(vtos);
1383