
src/hotspot/share/c1/c1_LIRAssembler.cpp


 91 PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {
 92   IRScope* scope = info->scope();
 93   Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
 94   if (Bytecodes::has_optional_appendix(bc_raw)) {
 95     return PatchingStub::load_appendix_id;
 96   }
 97   return PatchingStub::load_mirror_id;
 98 }
 99 
100 //---------------------------------------------------------------
101 
102 
103 LIR_Assembler::LIR_Assembler(Compilation* c):
104    _masm(c->masm())
105  , _bs(BarrierSet::barrier_set())
106  , _compilation(c)
107  , _frame_map(c->frame_map())
108  , _current_block(NULL)
109  , _pending_non_safepoint(NULL)
110  , _pending_non_safepoint_offset(0)

111 {
112   _slow_case_stubs = new CodeStubList();
113 }
114 
115 
116 LIR_Assembler::~LIR_Assembler() {
117   // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
118   // Reset it here to avoid an assertion.
119   _unwind_handler_entry.reset();
120 }
121 
122 
123 void LIR_Assembler::check_codespace() {
124   CodeSection* cs = _masm->code_section();
125   if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
126     BAILOUT("CodeBuffer overflow");
127   }
128 }
129 
130 
131 void LIR_Assembler::append_code_stub(CodeStub* stub) {

132   _slow_case_stubs->append(stub);
133 }
134 
135 void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
136   for (int m = 0; m < stub_list->length(); m++) {
137     CodeStub* s = stub_list->at(m);
138 
139     check_codespace();
140     CHECK_BAILOUT();
141 
142 #ifndef PRODUCT
143     if (CommentedAssembly) {
144       stringStream st;
145       s->print_name(&st);
146       st.print(" slow case");
147       _masm->block_comment(st.as_string());
148     }
149 #endif
150     s->emit_code(this);
151 #ifdef ASSERT

456 
457   // emit the static call stub stuff out of line
458   emit_static_call_stub();
459   CHECK_BAILOUT();
460 
461   switch (op->code()) {
462   case lir_static_call:
463   case lir_dynamic_call:
464     call(op, relocInfo::static_call_type);
465     break;
466   case lir_optvirtual_call:
467     call(op, relocInfo::opt_virtual_call_type);
468     break;
469   case lir_icvirtual_call:
470     ic_call(op);
471     break;
472   default:
473     fatal("unexpected op code: %s", op->name());
474     break;
475   }

476 
477   // JSR 292
478   // Record if this method has MethodHandle invokes.
479   if (op->is_method_handle_invoke()) {
480     compilation()->set_has_method_handle_invokes(true);
481   }
482 
483 #if defined(IA32) && defined(COMPILER2)
484   // C2 may leave the FPU stack dirty; clean it up
485   if (UseSSE < 2 && !CompilerConfig::is_c1_only_no_jvmci()) {
486     int i;
487     for ( i = 1; i <= 7 ; i++ ) {
488       ffree(i);
489     }
490     if (!op->result_opr()->is_float_kind()) {
491       ffree(0);
492     }
493   }
494 #endif // IA32 && COMPILER2
495 }

648     case lir_membar_storestore:
649       membar_storestore();
650       break;
651 
652     case lir_membar_loadstore:
653       membar_loadstore();
654       break;
655 
656     case lir_membar_storeload:
657       membar_storeload();
658       break;
659 
660     case lir_get_thread:
661       get_thread(op->result_opr());
662       break;
663 
664     case lir_on_spin_wait:
665       on_spin_wait();
666       break;
667 

668     default:
669       ShouldNotReachHere();
670       break;
671   }
672 }
673 
674 
675 void LIR_Assembler::emit_op2(LIR_Op2* op) {
676   switch (op->code()) {
677     case lir_cmp:
678       if (op->info() != NULL) {
679         assert(op->in_opr1()->is_address() || op->in_opr2()->is_address(),
680                "shouldn't be codeemitinfo for non-address operands");
681         add_debug_info_for_null_check_here(op->info()); // exception possible
682       }
683       comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
684       break;
685 
686     case lir_cmp_l2i:
687     case lir_cmp_fd2i:

 91 PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {
 92   IRScope* scope = info->scope();
 93   Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
 94   if (Bytecodes::has_optional_appendix(bc_raw)) {
 95     return PatchingStub::load_appendix_id;
 96   }
 97   return PatchingStub::load_mirror_id;
 98 }
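
For orientation, here is a minimal sketch of how a helper like patching_id() is typically consumed when C1 emits a patchable constant load. The surrounding context (_masm, info, patching_epilog) is assumed from the rest of the file; the snippet is illustrative and not part of this change.

    // Pick the patch kind from the bytecode at this bci: bytecodes with an optional
    // appendix get load_appendix_id, everything else gets load_mirror_id.
    PatchingStub* patch = new PatchingStub(_masm, patching_id(info));
    // ... emit the patchable instruction, then finish with something like:
    // patching_epilog(patch, lir_patch_normal, reg, info);   // 'reg' is a placeholder here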
 99 
100 //---------------------------------------------------------------
101 
102 
103 LIR_Assembler::LIR_Assembler(Compilation* c):
104    _masm(c->masm())
105  , _bs(BarrierSet::barrier_set())
106  , _compilation(c)
107  , _frame_map(c->frame_map())
108  , _current_block(NULL)
109  , _pending_non_safepoint(NULL)
110  , _pending_non_safepoint_offset(0)
111  , _immediate_oops_patched(0)
112 {
113   _slow_case_stubs = new CodeStubList();
114 }
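
The new _immediate_oops_patched(0) initializer implies a counter field on LIR_Assembler. A sketch of what the declaration side presumably looks like (assumed to live in c1_LIRAssembler.hpp, which is not part of this hunk):

    // Assumed member declaration (illustrative only):
    int _immediate_oops_patched;   // oop immediates patched directly into emitted code so far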
115 
116 
117 LIR_Assembler::~LIR_Assembler() {
118   // The unwind handler label may be unbound if this destructor is invoked because of a bail-out.
119   // Reset it here to avoid an assertion.
120   _unwind_handler_entry.reset();
121 }
122 
123 
124 void LIR_Assembler::check_codespace() {
125   CodeSection* cs = _masm->code_section();
126   if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
127     BAILOUT("CodeBuffer overflow");
128   }
129 }
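
The NOT_LP64(1*K)LP64_ONLY(2*K) expression selects a word-size-dependent slack threshold. Roughly, the HotSpot macros expand to the following (the name code_slack is a placeholder for illustration):

    #ifdef _LP64
      const int code_slack = 2 * K;   // 64-bit builds: bail out with less than 2 KB of code section left
    #else
      const int code_slack = 1 * K;   // 32-bit builds: 1 KB threshold
    #endif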
130 
131 
132 void LIR_Assembler::append_code_stub(CodeStub* stub) {
133   _immediate_oops_patched += stub->nr_immediate_oops_patched();
134   _slow_case_stubs->append(stub);
135 }
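
append_code_stub() now accumulates nr_immediate_oops_patched() from every stub it queues. That presumes a query on CodeStub along the following lines (the actual declaration would live in c1_CodeStubs.hpp, which is not shown here; this is a sketch of the assumed shape):

    // Assumed hook (illustrative): stubs that patch oop immediates into the
    // instruction stream override this; the default reports none.
    virtual int nr_immediate_oops_patched() const { return 0; }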
136 
137 void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
138   for (int m = 0; m < stub_list->length(); m++) {
139     CodeStub* s = stub_list->at(m);
140 
141     check_codespace();
142     CHECK_BAILOUT();
143 
144 #ifndef PRODUCT
145     if (CommentedAssembly) {
146       stringStream st;
147       s->print_name(&st);
148       st.print(" slow case");
149       _masm->block_comment(st.as_string());
150     }
151 #endif
152     s->emit_code(this);
153 #ifdef ASSERT

458 
459   // emit the static call stub stuff out of line
460   emit_static_call_stub();
461   CHECK_BAILOUT();
462 
463   switch (op->code()) {
464   case lir_static_call:
465   case lir_dynamic_call:
466     call(op, relocInfo::static_call_type);
467     break;
468   case lir_optvirtual_call:
469     call(op, relocInfo::opt_virtual_call_type);
470     break;
471   case lir_icvirtual_call:
472     ic_call(op);
473     break;
474   default:
475     fatal("unexpected op code: %s", op->name());
476     break;
477   }
478   // oopmap_metadata(-1); // TODO: maybe here instead of in call and ic_call ?
479 
480   // JSR 292
481   // Record if this method has MethodHandle invokes.
482   if (op->is_method_handle_invoke()) {
483     compilation()->set_has_method_handle_invokes(true);
484   }
485 
486 #if defined(IA32) && defined(COMPILER2)
487   // C2 may leave the FPU stack dirty; clean it up
488   if (UseSSE < 2 && !CompilerConfig::is_c1_only_no_jvmci()) {
489     int i;
490     for ( i = 1; i <= 7 ; i++ ) {
491       ffree(i);
492     }
493     if (!op->result_opr()->is_float_kind()) {
494       ffree(0);
495     }
496   }
497 #endif // IA32 && COMPILER2
498 }

651     case lir_membar_storestore:
652       membar_storestore();
653       break;
654 
655     case lir_membar_loadstore:
656       membar_loadstore();
657       break;
658 
659     case lir_membar_storeload:
660       membar_storeload();
661       break;
662 
663     case lir_get_thread:
664       get_thread(op->result_opr());
665       break;
666 
667     case lir_on_spin_wait:
668       on_spin_wait();
669       break;
670 
671 
672     default:
673       ShouldNotReachHere();
674       break;
675   }
676 }
677 
678 
679 void LIR_Assembler::emit_op2(LIR_Op2* op) {
680   switch (op->code()) {
681     case lir_cmp:
682       if (op->info() != NULL) {
683         assert(op->in_opr1()->is_address() || op->in_opr2()->is_address(),
684                "shouldn't be codeemitinfo for non-address operands");
685         add_debug_info_for_null_check_here(op->info()); // exception possible
686       }
687       comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
688       break;
689 
690     case lir_cmp_l2i:
691     case lir_cmp_fd2i: