src/hotspot/share/opto/output.cpp

3361     MacroAssembler masm(&buf);
3362     masm.bind(fakeL);
3363     n->as_MachBranch()->save_label(&saveL, &save_bnum);
3364     n->as_MachBranch()->label_set(&fakeL, 0);
3365   }
3366   n->emit(buf, C->regalloc());
3367 
3368   // Emitting into the scratch buffer should not fail
3369   assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3370 
3371   if (is_branch) // Restore label.
3372     n->as_MachBranch()->label_set(saveL, save_bnum);
3373 
3374   // End scratch_emit_size section.
3375   set_in_scratch_emit_size(false);
3376 
3377   return buf.insts_size();
3378 }
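
The scratch_emit_size path ending above measures an instruction's encoded size by emitting it into a throwaway scratch buffer: for branch nodes the real target label is saved, the branch is retargeted to a locally bound fake label so the emit never touches a label living outside the buffer, and the original label is restored once the size has been read back with insts_size(). A minimal standalone sketch of that idiom, with placeholder names (Branch, save_label, set_label here are illustrative, not HotSpot API):

    // Save the real target, emit against a scratch-local label, then restore.
    struct Branch {
      int* target = nullptr;                       // where the branch currently points
      void save_label(int** saved) { *saved = target; }
      void set_label(int* l)       { target = l; }
      int  emit()                  { return 4; }   // pretend the encoding is 4 bytes
    };

    int scratch_emit_size(Branch* b) {
      int  fake  = 0;                              // stands in for the locally bound fakeL
      int* saved = nullptr;
      b->save_label(&saved);                       // remember the real target
      b->set_label(&fake);                         // retarget to the scratch-local label
      int size = b->emit();                        // measure the encoding
      b->set_label(saved);                         // restore the real target
      return size;
    }

    int main() {
      Branch b;
      return scratch_emit_size(&b) == 4 ? 0 : 1;   // trivial usage check
    }
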
3379 
3380 void PhaseOutput::install() {
3381   if (!C->should_install_code()) {
3382     return;
3383   } else if (C->stub_function() != nullptr) {
3384     install_stub(C->stub_name());
3385   } else {
3386     install_code(C->method(),
3387                  C->entry_bci(),
3388                  CompileBroker::compiler2(),
3389                  C->has_unsafe_access(),
3390                  SharedRuntime::is_wide_vector(C->max_vector_size()),
3391                  C->rtm_state());
3392   }
3393 }
3394 
3395 void PhaseOutput::install_code(ciMethod*         target,
3396                                int               entry_bci,
3397                                AbstractCompiler* compiler,
3398                                bool              has_unsafe_access,
3399                                bool              has_wide_vectors,
3400                                RTMState          rtm_state) {
3401   // Check if we want to skip execution of all compiled code.
3402   {
3403 #ifndef PRODUCT
3404     if (OptoNoExecute) {
3405       C->record_method_not_compilable("+OptoNoExecute");  // Flag as failed
3406       return;
3407     }
3408 #endif

3415       if (!target->is_static()) {
3416         // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
3417         // before the inline cache check, so we don't have to execute any nop instructions when dispatching
3418         // through the UEP, yet we can ensure that the VEP is aligned appropriately.
3419         _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
3420       }
3421       _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
3422       _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
3423     }
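
The offsets recorded above define the nmethod entry points: the OSR entry is at offset 0, the verified entry point (VEP) sits at the end of the first block, and for instance methods the unverified entry point (UEP, where inline-cache dispatch lands) is placed ic_check_size() bytes earlier, so control falls through the IC check into the verified entry without padding nops. A small arithmetic sketch with made-up sizes (the real values come from _first_block_size and MacroAssembler::ic_check_size()):

    #include <cstdio>

    int main() {
      int first_block_size = 64;   // hypothetical bytes emitted ahead of the verified entry
      int ic_check_size    = 16;   // hypothetical size of the inline-cache check sequence

      int osr_entry      = 0;                                  // CodeOffsets::OSR_Entry
      int verified_entry = first_block_size;                   // CodeOffsets::Verified_Entry
      int entry          = first_block_size - ic_check_size;   // CodeOffsets::Entry (UEP)

      printf("OSR=%d UEP=%d VEP=%d\n", osr_entry, entry, verified_entry);
      return 0;
    }
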
3424 
3425     C->env()->register_method(target,
3426                                      entry_bci,
3427                                      &_code_offsets,
3428                                      _orig_pc_slot_offset_in_bytes,
3429                                      code_buffer(),
3430                                      frame_size_in_words(),
3431                                      oop_map_set(),
3432                                      &_handler_table,
3433                                      inc_table(),
3434                                      compiler,
3435                                      has_unsafe_access,
3436                                      SharedRuntime::is_wide_vector(C->max_vector_size()),
3437                                      C->has_monitors(),
3438                                      0,
3439                                      C->rtm_state());
3440 
3441     if (C->log() != nullptr) { // Print code cache state into compiler log
3442       C->log()->code_cache_state();
3443     }
3444   }
3445 }
3446 void PhaseOutput::install_stub(const char* stub_name) {
3447   // Entry point will be accessed using stub_entry_point();
3448   if (code_buffer() == nullptr) {
3449     Matcher::soft_match_failure();
3450   } else {
3451     if (PrintAssembly && (WizardMode || Verbose))
3452       tty->print_cr("### Stub::%s", stub_name);
3453 
3454     if (!C->failing()) {
3455       assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3456 
3457       // Make the NMethod
3458       // For now we mark the frame as never safe for profile stackwalking
3459       RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3460                                                       code_buffer(),
3461                                                       CodeOffsets::frame_never_safe,
3462                                                       // _code_offsets.value(CodeOffsets::Frame_Complete),
3463                                                       frame_size_in_words(),

3361     MacroAssembler masm(&buf);
3362     masm.bind(fakeL);
3363     n->as_MachBranch()->save_label(&saveL, &save_bnum);
3364     n->as_MachBranch()->label_set(&fakeL, 0);
3365   }
3366   n->emit(buf, C->regalloc());
3367 
3368   // Emitting into the scratch buffer should not fail
3369   assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3370 
3371   if (is_branch) // Restore label.
3372     n->as_MachBranch()->label_set(saveL, save_bnum);
3373 
3374   // End scratch_emit_size section.
3375   set_in_scratch_emit_size(false);
3376 
3377   return buf.insts_size();
3378 }
3379 
3380 void PhaseOutput::install() {
3381   if (C->should_install_code() && C->stub_function() != nullptr) {
3382     install_stub(C->stub_name());
3383   } else {
3384     install_code(C->method(),
3385                  C->entry_bci(),
3386                  CompilerThread::current()->compiler(),
3387                  C->has_unsafe_access(),
3388                  SharedRuntime::is_wide_vector(C->max_vector_size()),
3389                  C->rtm_state());
3390   }
3391 }
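
In this version install() routes a finished compilation as follows: stub compilations (non-null stub_function()) that should be installed go through install_stub(), everything else goes through install_code() with the compiler taken from the current CompilerThread, and the should-install decision itself is forwarded further down (it reappears as an argument to register_method below). A shape-only sketch of that dispatch, with placeholder names rather than HotSpot API:

    struct OutputPhase {
      bool should_install;
      bool is_stub;                       // corresponds to stub_function() != nullptr
      void install_stub() { /* wrap the code buffer in a RuntimeStub */ }
      void install_code() { /* register an nmethod; sees should_install as a parameter */ }
      void install() {
        if (should_install && is_stub) {
          install_stub();
        } else {
          install_code();                 // should_install is passed along, not checked here
        }
      }
    };
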
3392 
3393 void PhaseOutput::install_code(ciMethod*         target,
3394                                int               entry_bci,
3395                                AbstractCompiler* compiler,
3396                                bool              has_unsafe_access,
3397                                bool              has_wide_vectors,
3398                                RTMState          rtm_state) {
3399   // Check if we want to skip execution of all compiled code.
3400   {
3401 #ifndef PRODUCT
3402     if (OptoNoExecute) {
3403       C->record_method_not_compilable("+OptoNoExecute");  // Flag as failed
3404       return;
3405     }
3406 #endif

3413       if (!target->is_static()) {
3414         // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
3415         // before the inline cache check, so we don't have to execute any nop instructions when dispatching
3416         // through the UEP, yet we can ensure that the VEP is aligned appropriately.
3417         _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
3418       }
3419       _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
3420       _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
3421     }
3422 
3423     C->env()->register_method(target,
3424                                      entry_bci,
3425                                      &_code_offsets,
3426                                      _orig_pc_slot_offset_in_bytes,
3427                                      code_buffer(),
3428                                      frame_size_in_words(),
3429                                      oop_map_set(),
3430                                      &_handler_table,
3431                                      inc_table(),
3432                                      compiler,
3433                                      C->has_clinit_barriers(),
3434                                      C->for_preload(),
3435                                      has_unsafe_access,
3436                                      SharedRuntime::is_wide_vector(C->max_vector_size()),
3437                                      C->has_monitors(),
3438                                      0,
3439                                      C->should_install_code(),
3440                                      C->rtm_state());
3441 
3442     if (C->log() != nullptr) { // Print code cache state into compiler log
3443       C->log()->code_cache_state();
3444     }
3445     if (C->has_clinit_barriers()) {
3446       assert(C->for_preload(), "sanity");
3447       // Build second version of code without class initialization barriers
3448       if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
3449         // don't automatically precompile a barrier-free version unless explicitly asked
3450       } else {
3451         C->record_failure(C2Compiler::retry_no_clinit_barriers());
3452       }
3453     }
3454   }
3455 }
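
The has_clinit_barriers() branch above handles compilations that still contain class-initialization barriers (per the assert, that only happens for preload code): unless the task was an explicit PrecompileForPreload request, the compilation is flagged with retry_no_clinit_barriers() so that a second version without the barriers gets built. A tiny decision sketch of that flow, with placeholder enum and function names:

    enum class Reason { Normal, PrecompileForPreload };

    // What happens after code that carries clinit barriers has been installed.
    const char* post_install_action(bool has_clinit_barriers, Reason reason) {
      if (!has_clinit_barriers)                   return "keep";  // nothing to redo
      if (reason == Reason::PrecompileForPreload) return "keep";  // explicit preload request: stop here
      return "retry without clinit barriers";                     // build the barrier-free version
    }
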
3456 void PhaseOutput::install_stub(const char* stub_name) {
3457   // Entry point will be accessed using stub_entry_point();
3458   if (code_buffer() == nullptr) {
3459     Matcher::soft_match_failure();
3460   } else {
3461     if (PrintAssembly && (WizardMode || Verbose))
3462       tty->print_cr("### Stub::%s", stub_name);
3463 
3464     if (!C->failing()) {
3465       assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3466 
3467       // Make the NMethod
3468       // For now we mark the frame as never safe for profile stackwalking
3469       RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3470                                                       code_buffer(),
3471                                                       CodeOffsets::frame_never_safe,
3472                                                       // _code_offsets.value(CodeOffsets::Frame_Complete),
3473                                                       frame_size_in_words(),