src/hotspot/cpu/x86/templateInterpreterGenerator_x86.cpp

 370   default       : ShouldNotReachHere();
 371   }
 372   __ ret(0);                                   // return from result handler
 373   return entry;
 374 }
 375 
 376 address TemplateInterpreterGenerator::generate_safept_entry_for(
 377         TosState state,
 378         address runtime_entry) {
 379   address entry = __ pc();
 380 
 381   __ push(state);
 382   __ push_cont_fastpath();
 383   __ call_VM(noreg, runtime_entry);
 384   __ pop_cont_fastpath();
 385 
 386   __ dispatch_via(vtos, Interpreter::_normal_table.table_for(vtos));
 387   return entry;
 388 }
 389 
 390 address TemplateInterpreterGenerator::generate_cont_preempt_rerun_interpreter_adapter() {
 391   if (!Continuations::enabled()) return nullptr;
 392   address start = __ pc();
 393 
 394   __ pop(rbp);
 395 
 396   // We will return to the intermediate call made in call_VM, skipping the restoration
 397   // of bcp and locals done in InterpreterMacroAssembler::call_VM_base, so fix them here.
 398   __ restore_bcp();
 399   __ restore_locals();
 400 
 401   // Get return address before adjusting rsp
 402   __ movptr(rax, Address(rsp, 0));
 403 
 404   // Restore stack bottom
 405   __ movptr(rcx, Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize));
 406   __ lea(rsp, Address(rbp, rcx, Address::times_ptr));
 407   // and NULL it as a marker that esp is now tos until the next Java call
 408   __ movptr(Address(rbp, frame::interpreter_frame_last_sp_offset * wordSize), NULL_WORD);
 409 
 410   __ jmp(rax);
 411 
 412   return start;
 413 }
 414 
 415 
 416 // Helpers for commoning out cases in the various types of method entries.
 417 //
 418 
 419 
 420 // increment invocation count & check for overflow
 421 //
 422 // Note: checking for negative value instead of overflow
 423 //       so we have a 'sticky' overflow test
 424 //
 425 // rbx: method
 426 // rcx: invocation counter
 427 //
 428 void TemplateInterpreterGenerator::generate_counter_incr(Label* overflow) {
 429   Label done;
 430   // Note: In tiered we increment counters in either the Method* or the MDO, depending on whether we're profiling.
 431   Label no_mdo;
 432   if (ProfileInterpreter) {
 433     // Are we profiling?
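
The "Restore stack bottom" sequence in generate_cont_preempt_rerun_interpreter_adapter above loads the saved last-sp slot of the interpreter frame, rebuilds rsp from it, and then clears the slot so the cleared value marks that rsp is the expression-stack top again until the next Java call. The scaling in the lea suggests the slot holds a word offset relative to rbp rather than an absolute pointer. Below is a conceptual C++ sketch of that bookkeeping only; the function name, slot index, and frame array are illustrative stand-ins, not HotSpot's frame layout.

#include <cstdint>
#include <cstdio>

using word_t = intptr_t;

// Conceptual sketch of the three-instruction sequence: load the saved word
// offset, rebuild the stack pointer from it, and clear the slot as a
// "sp is tos again" marker. Names and indices are illustrative only.
word_t* restore_stack_bottom(word_t* fp, int last_sp_slot) {
  word_t offset_in_words = fp[last_sp_slot];  // movptr(rcx, Address(rbp, last_sp_offset * wordSize))
  word_t* sp = fp + offset_in_words;          // lea(rsp, Address(rbp, rcx, times_ptr))
  fp[last_sp_slot] = 0;                       // movptr(Address(rbp, ...), NULL_WORD): marker
  return sp;
}

int main() {
  word_t frame[8] = {0};
  word_t* fp = frame + 4;                     // pretend frame pointer
  fp[1] = -3;                                 // pretend saved offset: sp was fp - 3 words
  word_t* sp = restore_stack_bottom(fp, 1);
  printf("sp = fp%+td words, marker cleared: %d\n", (ptrdiff_t)(sp - fp), (int)(fp[1] == 0));
  return 0;
}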
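
On the "sticky" overflow note in generate_counter_incr: the comment explains that the generated code tests for a negative counter value rather than for the overflow condition itself, so once the counter has crossed into the range where the sign bit is set, the check keeps succeeding on every later invocation instead of firing only on the single increment that crossed the limit. Below is a minimal standalone sketch of that idea; the starting value, increment step, and function name are made up for illustration and this is not HotSpot code.

#include <cstdint>
#include <cstdio>

// Hypothetical counter sitting just below the point where the top bit gets set.
static uint32_t counter = 0x7FFFFFF0u;

// Test the sign bit ("negative value") instead of the overflow event itself.
// The single increment that sets the top bit is easy to miss, but the bit then
// stays set on later increments, so this check keeps firing: it is sticky.
static bool increment_and_check() {
  counter += 8u;
  return (counter & 0x80000000u) != 0u;
}

int main() {
  for (int i = 0; i < 4; i++) {
    printf("check %d: %s\n", i, increment_and_check() ? "overflowed" : "ok");
  }
  return 0;
}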