
src/hotspot/share/opto/output.cpp

--- old/src/hotspot/share/opto/output.cpp

3362   masm.bind(fakeL);
3363   if (is_branch) {
3364     n->as_MachBranch()->save_label(&saveL, &save_bnum);
3365     n->as_MachBranch()->label_set(&fakeL, 0);
3366   }
3367   n->emit(&masm, C->regalloc());
3368 
3369   // Emitting into the scratch buffer should not fail
3370   assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3371 
3372   if (is_branch) // Restore label.
3373     n->as_MachBranch()->label_set(saveL, save_bnum);
3374 
3375   // End scratch_emit_size section.
3376   set_in_scratch_emit_size(false);
3377 
3378   return buf.insts_size();
3379 }
3380 
3381 void PhaseOutput::install() {
3382   if (!C->should_install_code()) {
3383     return;
3384   } else if (C->stub_function() != nullptr) {
3385     install_stub(C->stub_name());
3386   } else {
3387     install_code(C->method(),
3388                  C->entry_bci(),
3389                  CompileBroker::compiler2(),
3390                  C->has_unsafe_access(),
3391                  SharedRuntime::is_wide_vector(C->max_vector_size()));
3392   }
3393 }
3394 
3395 void PhaseOutput::install_code(ciMethod*         target,
3396                                int               entry_bci,
3397                                AbstractCompiler* compiler,
3398                                bool              has_unsafe_access,
3399                                bool              has_wide_vectors) {
3400   // Check if we want to skip execution of all compiled code.
3401   {
3402 #ifndef PRODUCT
3403     if (OptoNoExecute) {
3404       C->record_method_not_compilable("+OptoNoExecute");  // Flag as failed
3405       return;
3406     }
3407 #endif
3408     Compile::TracePhase tp("install_code", &timers[_t_registerMethod]);
3409 
3410     if (is_osr_compilation()) {
3411       _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
3412       _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
3413     } else {
3414       if (!target->is_static()) {
3415         // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
3416         // before the inline cache check, so we don't have to execute any nop instructions when dispatching
3417         // through the UEP, yet we can ensure that the VEP is aligned appropriately.
3418         _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
3419       }
3420       _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
3421       _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
3422     }
3423 
3424     C->env()->register_method(target,
3425                                      entry_bci,
3426                                      &_code_offsets,
3427                                      _orig_pc_slot_offset_in_bytes,
3428                                      code_buffer(),
3429                                      frame_size_in_words(),
3430                                      oop_map_set(),
3431                                      &_handler_table,
3432                                      inc_table(),
3433                                      compiler,
3434                                      has_unsafe_access,
3435                                      SharedRuntime::is_wide_vector(C->max_vector_size()),
3436                                      C->has_monitors(),
3437                                      0);
3438 
3439     if (C->log() != nullptr) { // Print code cache state into compiler log
3440       C->log()->code_cache_state();
3441     }
3442   }
3443 }
3444 void PhaseOutput::install_stub(const char* stub_name) {
3445   // Entry point will be accessed using stub_entry_point();
3446   if (code_buffer() == nullptr) {
3447     Matcher::soft_match_failure();
3448   } else {
3449     if (PrintAssembly && (WizardMode || Verbose))
3450       tty->print_cr("### Stub::%s", stub_name);
3451 
3452     if (!C->failing()) {
3453       assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3454 
3455       // Make the NMethod
3456       // For now we mark the frame as never safe for profile stackwalking
3457       RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3458                                                       code_buffer(),
3459                                                       CodeOffsets::frame_never_safe,
3460                                                       // _code_offsets.value(CodeOffsets::Frame_Complete),
3461                                                       frame_size_in_words(),

+++ new/src/hotspot/share/opto/output.cpp

3362   masm.bind(fakeL);
3363   if (is_branch) {
3364     n->as_MachBranch()->save_label(&saveL, &save_bnum);
3365     n->as_MachBranch()->label_set(&fakeL, 0);
3366   }
3367   n->emit(&masm, C->regalloc());
3368 
3369   // Emitting into the scratch buffer should not fail
3370   assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3371 
3372   if (is_branch) // Restore label.
3373     n->as_MachBranch()->label_set(saveL, save_bnum);
3374 
3375   // End scratch_emit_size section.
3376   set_in_scratch_emit_size(false);
3377 
3378   return buf.insts_size();
3379 }
3380 
3381 void PhaseOutput::install() {
3382   if (C->should_install_code() && C->stub_function() != nullptr) {
3383     install_stub(C->stub_name());
3384   } else {
3385     install_code(C->method(),
3386                  C->entry_bci(),
3387                  CompilerThread::current()->compiler(),
3388                  C->has_unsafe_access(),
3389                  SharedRuntime::is_wide_vector(C->max_vector_size()));
3390   }
3391 }
3392 
3393 void PhaseOutput::install_code(ciMethod*         target,
3394                                int               entry_bci,
3395                                AbstractCompiler* compiler,
3396                                bool              has_unsafe_access,
3397                                bool              has_wide_vectors) {
3398   // Check if we want to skip execution of all compiled code.
3399   {
3400 #ifndef PRODUCT
3401     if (OptoNoExecute) {
3402       C->record_method_not_compilable("+OptoNoExecute");  // Flag as failed
3403       return;
3404     }
3405 #endif
3406     Compile::TracePhase tp("install_code", &timers[_t_registerMethod]);
3407 
3408     if (is_osr_compilation()) {
3409       _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
3410       _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
3411     } else {
3412       if (!target->is_static()) {
3413         // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
3414         // before the inline cache check, so we don't have to execute any nop instructions when dispatching
3415         // through the UEP, yet we can ensure that the VEP is aligned appropriately.
3416         _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
3417       }
3418       _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
3419       _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
3420     }
3421 
3422     C->env()->register_method(target,
3423                                      entry_bci,
3424                                      &_code_offsets,
3425                                      _orig_pc_slot_offset_in_bytes,
3426                                      code_buffer(),
3427                                      frame_size_in_words(),
3428                                      oop_map_set(),
3429                                      &_handler_table,
3430                                      inc_table(),
3431                                      compiler,
3432                                      C->has_clinit_barriers(),
3433                                      C->for_preload(),
3434                                      has_unsafe_access,
3435                                      SharedRuntime::is_wide_vector(C->max_vector_size()),
3436                                      C->has_monitors(),
3437                                      0,
3438                                      C->should_install_code());
3439 
3440     if (C->log() != nullptr) { // Print code cache state into compiler log
3441       C->log()->code_cache_state();
3442     }
3443     if (C->has_clinit_barriers()) {
3444       assert(C->for_preload(), "sanity");
3445       // Build second version of code without class initialization barriers
3446       if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
3447         // don't automatically precompile a barrier-free version unless explicitly asked
3448       } else {
3449         C->record_failure(C2Compiler::retry_no_clinit_barriers());
3450       }
3451     }
3452   }
3453 }
3454 void PhaseOutput::install_stub(const char* stub_name) {
3455   // Entry point will be accessed using stub_entry_point();
3456   if (code_buffer() == nullptr) {
3457     Matcher::soft_match_failure();
3458   } else {
3459     if (PrintAssembly && (WizardMode || Verbose))
3460       tty->print_cr("### Stub::%s", stub_name);
3461 
3462     if (!C->failing()) {
3463       assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3464 
3465       // Make the NMethod
3466       // For now we mark the frame as never safe for profile stackwalking
3467       RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3468                                                       code_buffer(),
3469                                                       CodeOffsets::frame_never_safe,
3470                                                       // _code_offsets.value(CodeOffsets::Frame_Complete),
3471                                                       frame_size_in_words(),