21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "compiler/compiler_globals.hpp"
28 #include "compiler/disassembler.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "interpreter/bytecodeHistogram.hpp"
31 #include "interpreter/interp_masm.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/templateInterpreterGenerator.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "oops/arrayOop.hpp"
37 #include "oops/methodCounters.hpp"
38 #include "oops/methodData.hpp"
39 #include "oops/method.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/jvmtiThreadState.hpp"
45 #include "runtime/continuation.hpp"
46 #include "runtime/deoptimization.hpp"
47 #include "runtime/frame.inline.hpp"
48 #include "runtime/globals.hpp"
49 #include "runtime/jniHandles.hpp"
50 #include "runtime/sharedRuntime.hpp"
51 #include "runtime/stubRoutines.hpp"
52 #include "runtime/synchronizer.hpp"
53 #include "runtime/timer.hpp"
54 #include "runtime/vframeArray.hpp"
55 #include "utilities/checkedCast.hpp"
56 #include "utilities/debug.hpp"
57 #include "utilities/macros.hpp"
58
59 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
60
// Size of interpreter code. Increase if too small. The interpreter will
// fail with a guarantee ("not enough space for interpreter generation")
// if the value is too small.
// Run with -XX:+PrintInterpreter to get the VM to print out the actual size.
// Max size with JVMTI.
#ifdef AMD64
// 64-bit: JVMCI builds reserve more space (268 KB) than non-JVMCI builds (256 KB).
int TemplateInterpreter::InterpreterCodeSize = JVMCI_ONLY(268) NOT_JVMCI(256) * 1024;
#else
// 32-bit x86 uses a fixed 224 KB budget.
int TemplateInterpreter::InterpreterCodeSize = 224 * 1024;
#endif // AMD64
71
// Global Register Names
// rbcp caches the interpreter bytecode pointer and rlocals the base of the
// current frame's locals area (r13/r14 on LP64, rsi/rdi on 32-bit). File-local
// so the register choice is shared by all generator code in this file.
static const Register rbcp = LP64_ONLY(r13) NOT_LP64(rsi);
static const Register rlocals = LP64_ONLY(r14) NOT_LP64(rdi);

// Byte offsets (relative to rbp) of interpreter-frame slots, converted from
// the word-indexed offsets declared in class frame.
const int method_offset = frame::interpreter_frame_method_offset * wordSize;
const int bcp_offset = frame::interpreter_frame_bcp_offset * wordSize;
const int locals_offset = frame::interpreter_frame_locals_offset * wordSize;
79
80
81 //-----------------------------------------------------------------------------
82
83 address TemplateInterpreterGenerator::generate_StackOverflowError_handler() {
84 address entry = __ pc();
85
86 #ifdef ASSERT
87 {
192 __ ffree(i);
193 }
194 } else if (UseSSE < 2) {
195 __ empty_FPU_stack();
196 }
197 #endif // COMPILER2
198 if ((state == ftos && UseSSE < 1) || (state == dtos && UseSSE < 2)) {
199 __ MacroAssembler::verify_FPU(1, "generate_return_entry_for compiled");
200 } else {
201 __ MacroAssembler::verify_FPU(0, "generate_return_entry_for compiled");
202 }
203
204 if (state == ftos) {
205 __ MacroAssembler::verify_FPU(UseSSE >= 1 ? 0 : 1, "generate_return_entry_for in interpreter");
206 } else if (state == dtos) {
207 __ MacroAssembler::verify_FPU(UseSSE >= 2 ? 0 : 1, "generate_return_entry_for in interpreter");
208 }
209 #endif // _LP64
210
211 // Restore stack bottom in case i2c adjusted stack
212 __ movptr(rcx, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
213 __ lea(rsp, Address(rbp, rcx, Address::times_ptr));
214 // and null it as marker that esp is now tos until next java call
215 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
216
217 __ restore_bcp();
218 __ restore_locals();
219
220 if (state == atos) {
221 Register mdp = rbx;
222 Register tmp = rcx;
223 __ profile_return_type(mdp, rax, tmp);
224 }
225
226 const Register cache = rbx;
227 const Register index = rcx;
228 if (index_size == sizeof(u4)) {
229 __ load_resolved_indy_entry(cache, index);
230 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
231 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
232 } else {
233 assert(index_size == sizeof(u2), "Can only be u2");
234 __ load_method_entry(cache, index);
235 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedMethodEntry::num_parameters_offset())));
236 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
|
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "compiler/compiler_globals.hpp"
28 #include "compiler/disassembler.hpp"
29 #include "gc/shared/barrierSetAssembler.hpp"
30 #include "interpreter/bytecodeHistogram.hpp"
31 #include "interpreter/interp_masm.hpp"
32 #include "interpreter/interpreter.hpp"
33 #include "interpreter/interpreterRuntime.hpp"
34 #include "interpreter/templateInterpreterGenerator.hpp"
35 #include "interpreter/templateTable.hpp"
36 #include "oops/arrayOop.hpp"
37 #include "oops/methodCounters.hpp"
38 #include "oops/methodData.hpp"
39 #include "oops/method.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "oops/inlineKlass.hpp"
42 #include "oops/resolvedIndyEntry.hpp"
43 #include "oops/resolvedMethodEntry.hpp"
44 #include "prims/jvmtiExport.hpp"
45 #include "prims/jvmtiThreadState.hpp"
46 #include "runtime/continuation.hpp"
47 #include "runtime/deoptimization.hpp"
48 #include "runtime/frame.inline.hpp"
49 #include "runtime/globals.hpp"
50 #include "runtime/jniHandles.hpp"
51 #include "runtime/sharedRuntime.hpp"
52 #include "runtime/stubRoutines.hpp"
53 #include "runtime/synchronizer.hpp"
54 #include "runtime/timer.hpp"
55 #include "runtime/vframeArray.hpp"
56 #include "utilities/checkedCast.hpp"
57 #include "utilities/debug.hpp"
58 #include "utilities/macros.hpp"
59
60 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
61
// Size of interpreter code. Increase if too small. The interpreter will
// fail with a guarantee ("not enough space for interpreter generation")
// if the value is too small.
// Run with -XX:+PrintInterpreter to get the VM to print out the actual size.
// Max size with JVMTI.
#ifdef AMD64
// 64-bit: JVMCI builds reserve more space (280 KB) than non-JVMCI builds (268 KB).
int TemplateInterpreter::InterpreterCodeSize = JVMCI_ONLY(280) NOT_JVMCI(268) * 1024;
#else
// 32-bit x86 uses a fixed 224 KB budget.
int TemplateInterpreter::InterpreterCodeSize = 224 * 1024;
#endif // AMD64
72
// Global Register Names
// rbcp caches the interpreter bytecode pointer and rlocals the base of the
// current frame's locals area (r13/r14 on LP64, rsi/rdi on 32-bit). File-local
// so the register choice is shared by all generator code in this file.
static const Register rbcp = LP64_ONLY(r13) NOT_LP64(rsi);
static const Register rlocals = LP64_ONLY(r14) NOT_LP64(rdi);

// Byte offsets (relative to rbp) of interpreter-frame slots, converted from
// the word-indexed offsets declared in class frame.
const int method_offset = frame::interpreter_frame_method_offset * wordSize;
const int bcp_offset = frame::interpreter_frame_bcp_offset * wordSize;
const int locals_offset = frame::interpreter_frame_locals_offset * wordSize;
80
81
82 //-----------------------------------------------------------------------------
83
84 address TemplateInterpreterGenerator::generate_StackOverflowError_handler() {
85 address entry = __ pc();
86
87 #ifdef ASSERT
88 {
193 __ ffree(i);
194 }
195 } else if (UseSSE < 2) {
196 __ empty_FPU_stack();
197 }
198 #endif // COMPILER2
199 if ((state == ftos && UseSSE < 1) || (state == dtos && UseSSE < 2)) {
200 __ MacroAssembler::verify_FPU(1, "generate_return_entry_for compiled");
201 } else {
202 __ MacroAssembler::verify_FPU(0, "generate_return_entry_for compiled");
203 }
204
205 if (state == ftos) {
206 __ MacroAssembler::verify_FPU(UseSSE >= 1 ? 0 : 1, "generate_return_entry_for in interpreter");
207 } else if (state == dtos) {
208 __ MacroAssembler::verify_FPU(UseSSE >= 2 ? 0 : 1, "generate_return_entry_for in interpreter");
209 }
210 #endif // _LP64
211
212 // Restore stack bottom in case i2c adjusted stack
213 __ movptr(rscratch1, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
214 __ lea(rsp, Address(rbp, rscratch1, Address::times_ptr));
215 // and null it as marker that esp is now tos until next java call
216 __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
217
218 if (state == atos && InlineTypeReturnedAsFields) {
219 __ store_inline_type_fields_to_buf(nullptr);
220 }
221
222 __ restore_bcp();
223 __ restore_locals();
224
225 if (state == atos) {
226 Register mdp = rbx;
227 Register tmp = rcx;
228 __ profile_return_type(mdp, rax, tmp);
229 }
230
231 const Register cache = rbx;
232 const Register index = rcx;
233 if (index_size == sizeof(u4)) {
234 __ load_resolved_indy_entry(cache, index);
235 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedIndyEntry::num_parameters_offset())));
236 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
237 } else {
238 assert(index_size == sizeof(u2), "Can only be u2");
239 __ load_method_entry(cache, index);
240 __ load_unsigned_short(cache, Address(cache, in_bytes(ResolvedMethodEntry::num_parameters_offset())));
241 __ lea(rsp, Address(rsp, cache, Interpreter::stackElementScale()));
|