< prev index next >

src/hotspot/share/c1/c1_LIRAssembler.cpp

Print this page




  89 PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {
  90   IRScope* scope = info->scope();
  91   Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
  92   if (Bytecodes::has_optional_appendix(bc_raw)) {
  93     return PatchingStub::load_appendix_id;
  94   }
  95   return PatchingStub::load_mirror_id;
  96 }
  97 
  98 //---------------------------------------------------------------
  99 
 100 
// Set up per-compilation assembler state: cache the MacroAssembler and
// frame map from the Compilation and the process-wide barrier set; no
// block is being emitted yet and no non-safepoint debug info is pending.
LIR_Assembler::LIR_Assembler(Compilation* c):
   _masm(c->masm())
 , _bs(BarrierSet::barrier_set())
 , _compilation(c)
 , _frame_map(c->frame_map())
 , _current_block(NULL)
 , _pending_non_safepoint(NULL)
 , _pending_non_safepoint_offset(0)
{
  // Slow-case stubs collected via append_code_stub() and emitted after the main code.
  _slow_case_stubs = new CodeStubList();
}
 112 
 113 
LIR_Assembler::~LIR_Assembler() {
  // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
  // Reset it here to avoid an assertion.
  _unwind_handler_entry.reset();
}
 119 
 120 
 121 void LIR_Assembler::check_codespace() {
 122   CodeSection* cs = _masm->code_section();
 123   if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
 124     BAILOUT("CodeBuffer overflow");
 125   }
 126 }
 127 
 128 
// Queue a slow-case stub for emission after the main code (see emit_stubs()).
void LIR_Assembler::append_code_stub(CodeStub* stub) {
  _slow_case_stubs->append(stub);
}
 132 
 133 void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
 134   for (int m = 0; m < stub_list->length(); m++) {
 135     CodeStub* s = stub_list->at(m);
 136 
 137     check_codespace();
 138     CHECK_BAILOUT();
 139 
 140 #ifndef PRODUCT
 141     if (CommentedAssembly) {
 142       stringStream st;
 143       s->print_name(&st);
 144       st.print(" slow case");
 145       _masm->block_comment(st.as_string());
 146     }
 147 #endif
 148     s->emit_code(this);
 149 #ifdef ASSERT


 457   CHECK_BAILOUT();
 458 
 459   switch (op->code()) {
 460   case lir_static_call:
 461   case lir_dynamic_call:
 462     call(op, relocInfo::static_call_type);
 463     break;
 464   case lir_optvirtual_call:
 465     call(op, relocInfo::opt_virtual_call_type);
 466     break;
 467   case lir_icvirtual_call:
 468     ic_call(op);
 469     break;
 470   case lir_virtual_call:
 471     vtable_call(op);
 472     break;
 473   default:
 474     fatal("unexpected op code: %s", op->name());
 475     break;
 476   }

 477 
 478   // JSR 292
 479   // Record if this method has MethodHandle invokes.
 480   if (op->is_method_handle_invoke()) {
 481     compilation()->set_has_method_handle_invokes(true);
 482   }
 483 
 484 #if defined(X86) && defined(TIERED)
 485   // C2 leave fpu stack dirty clean it
 486   if (UseSSE < 2) {
 487     int i;
 488     for ( i = 1; i <= 7 ; i++ ) {
 489       ffree(i);
 490     }
 491     if (!op->result_opr()->is_float_kind()) {
 492       ffree(0);
 493     }
 494   }
 495 #endif // X86 && TIERED
 496 }


 670 
 671     case lir_membar_storestore:
 672       membar_storestore();
 673       break;
 674 
 675     case lir_membar_loadstore:
 676       membar_loadstore();
 677       break;
 678 
 679     case lir_membar_storeload:
 680       membar_storeload();
 681       break;
 682 
 683     case lir_get_thread:
 684       get_thread(op->result_opr());
 685       break;
 686 
 687     case lir_on_spin_wait:
 688       on_spin_wait();
 689       break;









 690 
 691     default:
 692       ShouldNotReachHere();
 693       break;
 694   }
 695 }
 696 
 697 
 698 void LIR_Assembler::emit_op2(LIR_Op2* op) {
 699   switch (op->code()) {
 700     case lir_cmp:
 701       if (op->info() != NULL) {
 702         assert(op->in_opr1()->is_address() || op->in_opr2()->is_address(),
 703                "shouldn't be codeemitinfo for non-address operands");
 704         add_debug_info_for_null_check_here(op->info()); // exception possible
 705       }
 706       comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
 707       break;
 708 
 709     case lir_cmp_l2i:




  89 PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {
  90   IRScope* scope = info->scope();
  91   Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
  92   if (Bytecodes::has_optional_appendix(bc_raw)) {
  93     return PatchingStub::load_appendix_id;
  94   }
  95   return PatchingStub::load_mirror_id;
  96 }
  97 
  98 //---------------------------------------------------------------
  99 
 100 
// Set up per-compilation assembler state: cache the MacroAssembler and
// frame map from the Compilation and the process-wide barrier set; no
// block is being emitted yet, no non-safepoint debug info is pending,
// and no immediate oops have been patched.
LIR_Assembler::LIR_Assembler(Compilation* c):
   _masm(c->masm())
 , _bs(BarrierSet::barrier_set())
 , _compilation(c)
 , _frame_map(c->frame_map())
 , _current_block(NULL)
 , _pending_non_safepoint(NULL)
 , _pending_non_safepoint_offset(0)
 , _immediate_oops_patched(0)  // accumulated in append_code_stub()
{
  // Slow-case stubs collected via append_code_stub() and emitted after the main code.
  _slow_case_stubs = new CodeStubList();
}
 113 
 114 
LIR_Assembler::~LIR_Assembler() {
  // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
  // Reset it here to avoid an assertion.
  _unwind_handler_entry.reset();
}
 120 
 121 
 122 void LIR_Assembler::check_codespace() {
 123   CodeSection* cs = _masm->code_section();
 124   if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
 125     BAILOUT("CodeBuffer overflow");
 126   }
 127 }
 128 
 129 
// Queue a slow-case stub for emission after the main code (see emit_stubs())
// and accumulate the number of immediate oops the stub reports it will patch.
void LIR_Assembler::append_code_stub(CodeStub* stub) {
  _immediate_oops_patched += stub->nr_immediate_oops_patched();
  _slow_case_stubs->append(stub);
}
 134 
 135 void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
 136   for (int m = 0; m < stub_list->length(); m++) {
 137     CodeStub* s = stub_list->at(m);
 138 
 139     check_codespace();
 140     CHECK_BAILOUT();
 141 
 142 #ifndef PRODUCT
 143     if (CommentedAssembly) {
 144       stringStream st;
 145       s->print_name(&st);
 146       st.print(" slow case");
 147       _masm->block_comment(st.as_string());
 148     }
 149 #endif
 150     s->emit_code(this);
 151 #ifdef ASSERT


 459   CHECK_BAILOUT();
 460 
 461   switch (op->code()) {
 462   case lir_static_call:
 463   case lir_dynamic_call:
 464     call(op, relocInfo::static_call_type);
 465     break;
 466   case lir_optvirtual_call:
 467     call(op, relocInfo::opt_virtual_call_type);
 468     break;
 469   case lir_icvirtual_call:
 470     ic_call(op);
 471     break;
 472   case lir_virtual_call:
 473     vtable_call(op);
 474     break;
 475   default:
 476     fatal("unexpected op code: %s", op->name());
 477     break;
 478   }
 479   // oopmap_metadata(-1); // TODO: maybe here instead of in call and ic_call ?
 480 
 481   // JSR 292
 482   // Record if this method has MethodHandle invokes.
 483   if (op->is_method_handle_invoke()) {
 484     compilation()->set_has_method_handle_invokes(true);
 485   }
 486 
 487 #if defined(X86) && defined(TIERED)
 488   // C2 leave fpu stack dirty clean it
 489   if (UseSSE < 2) {
 490     int i;
 491     for ( i = 1; i <= 7 ; i++ ) {
 492       ffree(i);
 493     }
 494     if (!op->result_opr()->is_float_kind()) {
 495       ffree(0);
 496     }
 497   }
 498 #endif // X86 && TIERED
 499 }


 673 
 674     case lir_membar_storestore:
 675       membar_storestore();
 676       break;
 677 
 678     case lir_membar_loadstore:
 679       membar_loadstore();
 680       break;
 681 
 682     case lir_membar_storeload:
 683       membar_storeload();
 684       break;
 685 
 686     case lir_get_thread:
 687       get_thread(op->result_opr());
 688       break;
 689 
 690     case lir_on_spin_wait:
 691       on_spin_wait();
 692       break;
 693 
 694     case lir_getfp:
 695       getfp(op->result_opr());
 696       break;
 697 
 698     case lir_getsp:
 699       getsp(op->result_opr());
 700       break;
 701 
 702 
 703     default:
 704       ShouldNotReachHere();
 705       break;
 706   }
 707 }
 708 
 709 
 710 void LIR_Assembler::emit_op2(LIR_Op2* op) {
 711   switch (op->code()) {
 712     case lir_cmp:
 713       if (op->info() != NULL) {
 714         assert(op->in_opr1()->is_address() || op->in_opr2()->is_address(),
 715                "shouldn't be codeemitinfo for non-address operands");
 716         add_debug_info_for_null_check_here(op->info()); // exception possible
 717       }
 718       comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
 719       break;
 720 
 721     case lir_cmp_l2i:


< prev index next >