< prev index next >

src/hotspot/share/opto/output.cpp

Print this page

3364   masm.bind(fakeL);
3365   if (is_branch) {
3366     n->as_MachBranch()->save_label(&saveL, &save_bnum);
3367     n->as_MachBranch()->label_set(&fakeL, 0);
3368   }
3369   n->emit(&masm, C->regalloc());
3370 
3371   // Emitting into the scratch buffer should not fail
3372   assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3373 
3374   if (is_branch) // Restore label.
3375     n->as_MachBranch()->label_set(saveL, save_bnum);
3376 
3377   // End scratch_emit_size section.
3378   set_in_scratch_emit_size(false);
3379 
3380   return buf.insts_size();
3381 }
3382 
// Install the result of this C2 compile (pre-change version).
// Nothing is installed when code installation is disabled for this compile;
// otherwise either a runtime stub (C->stub_function() != nullptr) or a
// regular nmethod for the compiled Java method is installed.
3383 void PhaseOutput::install() {
3384   if (!C->should_install_code()) {
3385     return;  // Code installation disabled -- discard the compile result.
3386   } else if (C->stub_function() != nullptr) {
3387     install_stub(C->stub_name());
3388   } else {
3389     install_code(C->method(),
3390                  C->entry_bci(),
3391                  CompileBroker::compiler2(),
3392                  C->has_unsafe_access(),
3393                  SharedRuntime::is_wide_vector(C->max_vector_size()),
3394                  C->rtm_state());
3395   }
3396 }
3397 
// Install the compiled Java method: fix up the entry-point code offsets and
// hand the finished code buffer, oop maps and tables to ciEnv::register_method()
// for nmethod creation.
// NOTE(review): the has_wide_vectors and rtm_state parameters are not used in
// this body -- the register_method() call below recomputes both from C
// (file lines 3439 and 3442). Consider dropping the parameters or using them.
3398 void PhaseOutput::install_code(ciMethod*         target,
3399                                int               entry_bci,
3400                                AbstractCompiler* compiler,
3401                                bool              has_unsafe_access,
3402                                bool              has_wide_vectors,
3403                                RTMState          rtm_state) {
3404   // Check if we want to skip execution of all compiled code.
3405   {
3406 #ifndef PRODUCT
3407     if (OptoNoExecute) {
3408       C->record_method_not_compilable("+OptoNoExecute");  // Flag as failed
3409       return;
3410     }
3411 #endif

// NOTE(review): webrev elides unchanged file lines 3412-3417 here; the scope
// containing the code below is opened in the elided span.
3418       if (!target->is_static()) {
3419         // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
3420         // before the inline cache check, so we don't have to execute any nop instructions when dispatching
3421         // through the UEP, yet we can ensure that the VEP is aligned appropriately.
3422         _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
3423       }
3424       _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
3425       _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
3426     }
3427 
// Register the finished code with the compiler interface; this is what
// actually creates the nmethod.
3428     C->env()->register_method(target,
3429                                      entry_bci,
3430                                      &_code_offsets,
3431                                      _orig_pc_slot_offset_in_bytes,
3432                                      code_buffer(),
3433                                      frame_size_in_words(),
3434                                      oop_map_set(),
3435                                      &_handler_table,
3436                                      inc_table(),
3437                                      compiler,


3438                                      has_unsafe_access,
3439                                      SharedRuntime::is_wide_vector(C->max_vector_size()),  // recomputed; has_wide_vectors param ignored
3440                                      C->has_monitors(),
3441                                      0,

3442                                      C->rtm_state());  // recomputed; rtm_state param ignored
3443 
3444     if (C->log() != nullptr) { // Print code cache state into compiler log
3445       C->log()->code_cache_state();
3446     }









3447   }
3448 }
3449 void PhaseOutput::install_stub(const char* stub_name) {
3450   // Entry point will be accessed using stub_entry_point();
3451   if (code_buffer() == nullptr) {
3452     Matcher::soft_match_failure();
3453   } else {
3454     if (PrintAssembly && (WizardMode || Verbose))
3455       tty->print_cr("### Stub::%s", stub_name);
3456 
3457     if (!C->failing()) {
3458       assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3459 
3460       // Make the NMethod
3461       // For now we mark the frame as never safe for profile stackwalking
3462       RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3463                                                       code_buffer(),
3464                                                       CodeOffsets::frame_never_safe,
3465                                                       // _code_offsets.value(CodeOffsets::Frame_Complete),
3466                                                       frame_size_in_words(),

3364   masm.bind(fakeL);
3365   if (is_branch) {
3366     n->as_MachBranch()->save_label(&saveL, &save_bnum);
3367     n->as_MachBranch()->label_set(&fakeL, 0);
3368   }
3369   n->emit(&masm, C->regalloc());
3370 
3371   // Emitting into the scratch buffer should not fail
3372   assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3373 
3374   if (is_branch) // Restore label.
3375     n->as_MachBranch()->label_set(saveL, save_bnum);
3376 
3377   // End scratch_emit_size section.
3378   set_in_scratch_emit_size(false);
3379 
3380   return buf.insts_size();
3381 }
3382 
// Install the result of this C2 compile (changed version).
// NOTE(review): behavior change vs. the old code -- a compile with
// !C->should_install_code() no longer returns early here; it falls through to
// install_code(), which now forwards C->should_install_code() to
// ciEnv::register_method() (file line 3442), presumably to register without
// installing. Confirm the stub case is intended: a stub compile
// (stub_function() != nullptr) with installation disabled also takes the
// install_code(C->method(), ...) path here, where C->method() is presumably
// null for stubs -- verify against register_method()'s expectations.
3383 void PhaseOutput::install() {
3384   if (C->should_install_code() && C->stub_function() != nullptr) {


3385     install_stub(C->stub_name());
3386   } else {
3387     install_code(C->method(),
3388                  C->entry_bci(),
3389                  CompilerThread::current()->compiler(),
3390                  C->has_unsafe_access(),
3391                  SharedRuntime::is_wide_vector(C->max_vector_size()),
3392                  C->rtm_state());
3393   }
3394 }
3395 
// Install the compiled Java method: fix up the entry-point code offsets, hand
// the finished code to ciEnv::register_method() for nmethod creation, and --
// new in this version -- arrange a barrier-free recompile when the code was
// built with class-initialization barriers.
// NOTE(review): the has_wide_vectors and rtm_state parameters are still
// unused -- register_method() recomputes both from C (file lines 3439, 3443).
3396 void PhaseOutput::install_code(ciMethod*         target,
3397                                int               entry_bci,
3398                                AbstractCompiler* compiler,
3399                                bool              has_unsafe_access,
3400                                bool              has_wide_vectors,
3401                                RTMState          rtm_state) {
3402   // Check if we want to skip execution of all compiled code.
3403   {
3404 #ifndef PRODUCT
3405     if (OptoNoExecute) {
3406       C->record_method_not_compilable("+OptoNoExecute");  // Flag as failed
3407       return;
3408     }
3409 #endif

// NOTE(review): webrev elides unchanged file lines 3410-3415 here; the scope
// containing the code below is opened in the elided span.
3416       if (!target->is_static()) {
3417         // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
3418         // before the inline cache check, so we don't have to execute any nop instructions when dispatching
3419         // through the UEP, yet we can ensure that the VEP is aligned appropriately.
3420         _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
3421       }
3422       _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
3423       _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
3424     }
3425 
// Register the finished code with the compiler interface. New arguments in
// this version: has_clinit_barriers / for_preload (precompilation state) and
// should_install_code (lets registration proceed even when installation is
// disabled -- see install()).
3426     C->env()->register_method(target,
3427                                      entry_bci,
3428                                      &_code_offsets,
3429                                      _orig_pc_slot_offset_in_bytes,
3430                                      code_buffer(),
3431                                      frame_size_in_words(),
3432                                      oop_map_set(),
3433                                      &_handler_table,
3434                                      inc_table(),
3435                                      compiler,
3436                                      C->has_clinit_barriers(),
3437                                      C->for_preload(),
3438                                      has_unsafe_access,
3439                                      SharedRuntime::is_wide_vector(C->max_vector_size()),  // recomputed; has_wide_vectors param ignored
3440                                      C->has_monitors(),
3441                                      0,
3442                                      C->should_install_code(),
3443                                      C->rtm_state());  // recomputed; rtm_state param ignored
3444 
3445     if (C->log() != nullptr) { // Print code cache state into compiler log
3446       C->log()->code_cache_state();
3447     }
// When the method was compiled with class-initialization barriers, record a
// retry so a second, barrier-free version is compiled -- unless this compile
// was an explicit precompile-for-preload request, in which case the
// barrier-free version is only built when explicitly asked for.
3448     if (C->has_clinit_barriers()) {
3449       assert(C->for_preload(), "sanity");
3450       // Build second version of code without class initialization barriers
3451       if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
3452         // don't automatically precompile a barrier-free version unless explicitly asked
3453       } else {
3454         C->record_failure(C2Compiler::retry_no_clinit_barriers());
3455       }
3456     }
3457   }
3458 }
3459 void PhaseOutput::install_stub(const char* stub_name) {
3460   // Entry point will be accessed using stub_entry_point();
3461   if (code_buffer() == nullptr) {
3462     Matcher::soft_match_failure();
3463   } else {
3464     if (PrintAssembly && (WizardMode || Verbose))
3465       tty->print_cr("### Stub::%s", stub_name);
3466 
3467     if (!C->failing()) {
3468       assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3469 
3470       // Make the NMethod
3471       // For now we mark the frame as never safe for profile stackwalking
3472       RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3473                                                       code_buffer(),
3474                                                       CodeOffsets::frame_never_safe,
3475                                                       // _code_offsets.value(CodeOffsets::Frame_Complete),
3476                                                       frame_size_in_words(),
< prev index next >