< prev index next >

src/hotspot/cpu/x86/templateInterpreterGenerator_x86.cpp

Print this page

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/javaClasses.hpp"
  28 #include "compiler/compiler_globals.hpp"
  29 #include "compiler/disassembler.hpp"
  30 #include "gc/shared/barrierSetAssembler.hpp"
  31 #include "interpreter/bytecodeHistogram.hpp"
  32 #include "interpreter/interp_masm.hpp"
  33 #include "interpreter/interpreter.hpp"
  34 #include "interpreter/interpreterRuntime.hpp"
  35 #include "interpreter/templateInterpreterGenerator.hpp"
  36 #include "interpreter/templateTable.hpp"
  37 #include "oops/arrayOop.hpp"
  38 #include "oops/methodData.hpp"
  39 #include "oops/method.hpp"
  40 #include "oops/oop.inline.hpp"

  41 #include "prims/jvmtiExport.hpp"
  42 #include "prims/jvmtiThreadState.hpp"
  43 #include "runtime/deoptimization.hpp"
  44 #include "runtime/frame.inline.hpp"
  45 #include "runtime/jniHandles.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 #include "runtime/stubRoutines.hpp"
  48 #include "runtime/synchronizer.hpp"
  49 #include "runtime/timer.hpp"
  50 #include "runtime/vframeArray.hpp"
  51 #include "utilities/debug.hpp"
  52 #include "utilities/macros.hpp"
  53 
  54 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  55 
  56 // Size of interpreter code.  Increase if too small.  Interpreter will
  57 // fail with a guarantee ("not enough space for interpreter generation");
  58 // if too small.
  59 // Run with +PrintInterpreter to get the VM to print out the size.
  60 // Max size with JVMTI
  61 #ifdef AMD64
// On 64-bit, JVMCI builds reserve extra code-cache space for the interpreter
// (268 KB vs 256 KB) -- presumably because JVMCI adds entry stubs; TODO confirm.
  62 int TemplateInterpreter::InterpreterCodeSize = JVMCI_ONLY(268) NOT_JVMCI(256) * 1024;
  63 #else
// 32-bit x86 uses a single fixed budget (JVMCI is not a factor here).
  64 int TemplateInterpreter::InterpreterCodeSize = 224 * 1024;
  65 #endif // AMD64
  66 
  67 // Global Register Names
// Dedicated interpreter-state registers: bytecode pointer and locals base.
// 64-bit pins them to r13/r14; 32-bit falls back to rsi/rdi. Presumably chosen
// as callee-saved registers on each ABI -- TODO confirm against the ABI docs.
  68 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  69 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  70 
// Byte offsets (frame-slot index * wordSize) of interpreter frame fields,
// used when addressing relative to rbp.
  71 const int method_offset = frame::interpreter_frame_method_offset * wordSize;
  72 const int bcp_offset    = frame::interpreter_frame_bcp_offset    * wordSize;
  73 const int locals_offset = frame::interpreter_frame_locals_offset * wordSize;
  74 
  75 
  76 //-----------------------------------------------------------------------------
  77 
  78 address TemplateInterpreterGenerator::generate_StackOverflowError_handler() {
  79   address entry = __ pc();
  80 
  81 #ifdef ASSERT
  82   {

 190   }
 191 #endif // COMPILER2
 192   if ((state == ftos && UseSSE < 1) || (state == dtos && UseSSE < 2)) {
 193     __ MacroAssembler::verify_FPU(1, "generate_return_entry_for compiled");
 194   } else {
 195     __ MacroAssembler::verify_FPU(0, "generate_return_entry_for compiled");
 196   }
 197 
 198   if (state == ftos) {
 199     __ MacroAssembler::verify_FPU(UseSSE >= 1 ? 0 : 1, "generate_return_entry_for in interpreter");
 200   } else if (state == dtos) {
 201     __ MacroAssembler::verify_FPU(UseSSE >= 2 ? 0 : 1, "generate_return_entry_for in interpreter");
 202   }
 203 #endif // _LP64
 204 
 205   // Restore stack bottom in case i2c adjusted stack
 206   __ movptr(rsp, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
 207   // and NULL it as marker that esp is now tos until next java call
 208   __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD);
 209 




 210   __ restore_bcp();
 211   __ restore_locals();
 212 
 213   if (state == atos) {
 214     Register mdp = rbx;
 215     Register tmp = rcx;
 216     __ profile_return_type(mdp, rax, tmp);
 217   }
 218 
 219   const Register cache = rbx;
 220   const Register index = rcx;
 221   __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
 222 
 223   const Register flags = cache;
 224   __ movl(flags, Address(cache, index, Address::times_ptr, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
 225   __ andl(flags, ConstantPoolCacheEntry::parameter_size_mask);
 226   __ lea(rsp, Address(rsp, flags, Interpreter::stackElementScale()));
 227 
 228    const Register java_thread = NOT_LP64(rcx) LP64_ONLY(r15_thread);
 229    if (JvmtiExport::can_pop_frame()) {

 332         // Store as float and empty fpu stack
 333         __ fstp_s(Address(rsp, 0));
 334         // and reload
 335         __ movflt(xmm0, Address(rsp, 0));
 336       } else if (type == T_DOUBLE && UseSSE >= 2 ) {
 337         __ movdbl(xmm0, Address(rsp, 0));
 338       } else {
 339         // restore ST0
 340         __ fld_d(Address(rsp, 0));
 341       }
 342       // and pop the temp
 343       __ addptr(rsp, 2 * wordSize);
 344       __ push(t);                           // restore return address
 345     }
 346     break;
 347 #else
 348   case T_FLOAT  : /* nothing to do */        break;
 349   case T_DOUBLE : /* nothing to do */        break;
 350 #endif // _LP64
 351 

 352   case T_OBJECT :
 353     // retrieve result from frame
 354     __ movptr(rax, Address(rbp, frame::interpreter_frame_oop_temp_offset*wordSize));
 355     // and verify it
 356     __ verify_oop(rax);
 357     break;
 358   default       : ShouldNotReachHere();
 359   }
 360   __ ret(0);                                   // return from result handler
 361   return entry;
 362 }
 363 
 364 address TemplateInterpreterGenerator::generate_safept_entry_for(
 365         TosState state,
 366         address runtime_entry) {
 367   address entry = __ pc();
 368   __ push(state);
 369   __ call_VM(noreg, runtime_entry);
 370   __ dispatch_via(vtos, Interpreter::_normal_table.table_for(vtos));
 371   return entry;

  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "classfile/javaClasses.hpp"
  28 #include "compiler/compiler_globals.hpp"
  29 #include "compiler/disassembler.hpp"
  30 #include "gc/shared/barrierSetAssembler.hpp"
  31 #include "interpreter/bytecodeHistogram.hpp"
  32 #include "interpreter/interp_masm.hpp"
  33 #include "interpreter/interpreter.hpp"
  34 #include "interpreter/interpreterRuntime.hpp"
  35 #include "interpreter/templateInterpreterGenerator.hpp"
  36 #include "interpreter/templateTable.hpp"
  37 #include "oops/arrayOop.hpp"
  38 #include "oops/methodData.hpp"
  39 #include "oops/method.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "oops/inlineKlass.hpp"
  42 #include "prims/jvmtiExport.hpp"
  43 #include "prims/jvmtiThreadState.hpp"
  44 #include "runtime/deoptimization.hpp"
  45 #include "runtime/frame.inline.hpp"
  46 #include "runtime/jniHandles.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubRoutines.hpp"
  49 #include "runtime/synchronizer.hpp"
  50 #include "runtime/timer.hpp"
  51 #include "runtime/vframeArray.hpp"
  52 #include "utilities/debug.hpp"
  53 #include "utilities/macros.hpp"
  54 
  55 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
  56 
  57 // Size of interpreter code.  Increase if too small.  Interpreter will
  58 // fail with a guarantee ("not enough space for interpreter generation");
  59 // if too small.
  60 // Run with +PrintInterpreter to get the VM to print out the size.
  61 // Max size with JVMTI
  62 #ifdef AMD64
// On 64-bit, JVMCI builds reserve extra code-cache space for the interpreter
// (280 KB vs 268 KB) -- presumably because JVMCI adds entry stubs; TODO confirm.
  63 int TemplateInterpreter::InterpreterCodeSize = JVMCI_ONLY(280) NOT_JVMCI(268) * 1024;
  64 #else
// 32-bit x86 uses a single fixed budget (JVMCI is not a factor here).
  65 int TemplateInterpreter::InterpreterCodeSize = 224 * 1024;
  66 #endif // AMD64
  67 
  68 // Global Register Names
// Dedicated interpreter-state registers: bytecode pointer and locals base.
// 64-bit pins them to r13/r14; 32-bit falls back to rsi/rdi. Presumably chosen
// as callee-saved registers on each ABI -- TODO confirm against the ABI docs.
  69 static const Register rbcp     = LP64_ONLY(r13) NOT_LP64(rsi);
  70 static const Register rlocals  = LP64_ONLY(r14) NOT_LP64(rdi);
  71 
// Byte offsets (frame-slot index * wordSize) of interpreter frame fields,
// used when addressing relative to rbp.
  72 const int method_offset = frame::interpreter_frame_method_offset * wordSize;
  73 const int bcp_offset    = frame::interpreter_frame_bcp_offset    * wordSize;
  74 const int locals_offset = frame::interpreter_frame_locals_offset * wordSize;
  75 
  76 
  77 //-----------------------------------------------------------------------------
  78 
  79 address TemplateInterpreterGenerator::generate_StackOverflowError_handler() {
  80   address entry = __ pc();
  81 
  82 #ifdef ASSERT
  83   {

 191   }
 192 #endif // COMPILER2
 193   if ((state == ftos && UseSSE < 1) || (state == dtos && UseSSE < 2)) {
 194     __ MacroAssembler::verify_FPU(1, "generate_return_entry_for compiled");
 195   } else {
 196     __ MacroAssembler::verify_FPU(0, "generate_return_entry_for compiled");
 197   }
 198 
 199   if (state == ftos) {
 200     __ MacroAssembler::verify_FPU(UseSSE >= 1 ? 0 : 1, "generate_return_entry_for in interpreter");
 201   } else if (state == dtos) {
 202     __ MacroAssembler::verify_FPU(UseSSE >= 2 ? 0 : 1, "generate_return_entry_for in interpreter");
 203   }
 204 #endif // _LP64
 205 
 206   // Restore stack bottom in case i2c adjusted stack
 207   __ movptr(rsp, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
 208   // and NULL it as marker that esp is now tos until next java call
 209   __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), (int32_t)NULL_WORD);
 210 
 211   if (state == atos && InlineTypeReturnedAsFields) {
 212     __ store_inline_type_fields_to_buf(NULL);
 213   }
 214 
 215   __ restore_bcp();
 216   __ restore_locals();
 217 
 218   if (state == atos) {
 219     Register mdp = rbx;
 220     Register tmp = rcx;
 221     __ profile_return_type(mdp, rax, tmp);
 222   }
 223 
 224   const Register cache = rbx;
 225   const Register index = rcx;
 226   __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
 227 
 228   const Register flags = cache;
 229   __ movl(flags, Address(cache, index, Address::times_ptr, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset()));
 230   __ andl(flags, ConstantPoolCacheEntry::parameter_size_mask);
 231   __ lea(rsp, Address(rsp, flags, Interpreter::stackElementScale()));
 232 
 233    const Register java_thread = NOT_LP64(rcx) LP64_ONLY(r15_thread);
 234    if (JvmtiExport::can_pop_frame()) {

 337         // Store as float and empty fpu stack
 338         __ fstp_s(Address(rsp, 0));
 339         // and reload
 340         __ movflt(xmm0, Address(rsp, 0));
 341       } else if (type == T_DOUBLE && UseSSE >= 2 ) {
 342         __ movdbl(xmm0, Address(rsp, 0));
 343       } else {
 344         // restore ST0
 345         __ fld_d(Address(rsp, 0));
 346       }
 347       // and pop the temp
 348       __ addptr(rsp, 2 * wordSize);
 349       __ push(t);                           // restore return address
 350     }
 351     break;
 352 #else
 353   case T_FLOAT  : /* nothing to do */        break;
 354   case T_DOUBLE : /* nothing to do */        break;
 355 #endif // _LP64
 356 
 357   case T_INLINE_TYPE: // fall through (inline types are handled with oops)
 358   case T_OBJECT :
 359     // retrieve result from frame
 360     __ movptr(rax, Address(rbp, frame::interpreter_frame_oop_temp_offset*wordSize));
 361     // and verify it
 362     __ verify_oop(rax);
 363     break;
 364   default       : ShouldNotReachHere();
 365   }
 366   __ ret(0);                                   // return from result handler
 367   return entry;
 368 }
 369 
 370 address TemplateInterpreterGenerator::generate_safept_entry_for(
 371         TosState state,
 372         address runtime_entry) {
 373   address entry = __ pc();
 374   __ push(state);
 375   __ call_VM(noreg, runtime_entry);
 376   __ dispatch_via(vtos, Interpreter::_normal_table.table_for(vtos));
 377   return entry;
< prev index next >