3386 masm.bind(fakeL);
3387 if (is_branch) {
3388 n->as_MachBranch()->save_label(&saveL, &save_bnum);
3389 n->as_MachBranch()->label_set(&fakeL, 0);
3390 }
3391 n->emit(&masm, C->regalloc());
3392
3393 // Emitting into the scratch buffer should not fail
3394 assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3395
3396 if (is_branch) // Restore label.
3397 n->as_MachBranch()->label_set(saveL, save_bnum);
3398
3399 // End scratch_emit_size section.
3400 set_in_scratch_emit_size(false);
3401
3402 return buf.insts_size();
3403 }
3404
3405 void PhaseOutput::install() {
3406 if (!C->should_install_code()) {
3407 return;
3408 } else if (C->stub_function() != nullptr) {
3409 install_stub(C->stub_name());
3410 } else {
3411 install_code(C->method(),
3412 C->entry_bci(),
3413 CompileBroker::compiler2(),
3414 C->has_unsafe_access(),
3415 SharedRuntime::is_wide_vector(C->max_vector_size()));
3416 }
3417 }
3418
// Hand the finished compile over to the CI environment, which turns the
// filled-in CodeBuffer into an nmethod (or records a failure).
//
// target            - the Java method that was compiled
// entry_bci         - entry bytecode index; presumably InvocationEntryBci
//                     for a normal compile and the OSR bci otherwise -- TODO confirm
// compiler          - compiler instance the resulting nmethod is attributed to
// has_unsafe_access - recorded on the nmethod via register_method()
// has_wide_vectors  - NOTE(review): appears unused here; the register_method()
//                     call below recomputes SharedRuntime::is_wide_vector()
//                     from C->max_vector_size() instead -- confirm intent.
void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    // Debugging aid: flag the compile as failed so the code is never executed.
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp("install_code", &timers[_t_registerMethod]);

    // NOTE(review): a few original lines are elided from this excerpt here;
    // the closing brace after OSR_Entry below presumably matches a branch
    // opened in the elided span -- verify against the full file.
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
      // Verified entry point follows the first block; OSR entry offset is 0 here.
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }

    // Register with the runtime: builds the nmethod from the code buffer,
    // entry offsets, oop maps, and exception/implicit-null tables.
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0);

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
  }
}
3469 void PhaseOutput::install_stub(const char* stub_name) {
3470 // Entry point will be accessed using stub_entry_point();
3471 if (code_buffer() == nullptr) {
3472 Matcher::soft_match_failure();
3473 } else {
3474 if (PrintAssembly && (WizardMode || Verbose))
3475 tty->print_cr("### Stub::%s", stub_name);
3476
3477 if (!C->failing()) {
3478 assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3479
3480 // Make the NMethod
3481 // For now we mark the frame as never safe for profile stackwalking
3482 RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3483 code_buffer(),
3484 CodeOffsets::frame_never_safe,
3485 // _code_offsets.value(CodeOffsets::Frame_Complete),
3486 frame_size_in_words(),
|
3386 masm.bind(fakeL);
3387 if (is_branch) {
3388 n->as_MachBranch()->save_label(&saveL, &save_bnum);
3389 n->as_MachBranch()->label_set(&fakeL, 0);
3390 }
3391 n->emit(&masm, C->regalloc());
3392
3393 // Emitting into the scratch buffer should not fail
3394 assert (!C->failing(), "Must not have pending failure. Reason is: %s", C->failure_reason());
3395
3396 if (is_branch) // Restore label.
3397 n->as_MachBranch()->label_set(saveL, save_bnum);
3398
3399 // End scratch_emit_size section.
3400 set_in_scratch_emit_size(false);
3401
3402 return buf.insts_size();
3403 }
3404
3405 void PhaseOutput::install() {
3406 if (C->should_install_code() && C->stub_function() != nullptr) {
3407 install_stub(C->stub_name());
3408 } else {
3409 install_code(C->method(),
3410 C->entry_bci(),
3411 CompilerThread::current()->compiler(),
3412 C->has_unsafe_access(),
3413 SharedRuntime::is_wide_vector(C->max_vector_size()));
3414 }
3415 }
3416
// Hand the finished compile over to the CI environment, which turns the
// filled-in CodeBuffer into an nmethod (or records a failure). May also
// request a recompile without class-initialization barriers (see below).
//
// target            - the Java method that was compiled
// entry_bci         - entry bytecode index; presumably InvocationEntryBci
//                     for a normal compile and the OSR bci otherwise -- TODO confirm
// compiler          - compiler instance the resulting nmethod is attributed to
// has_unsafe_access - recorded on the nmethod via register_method()
// has_wide_vectors  - NOTE(review): appears unused here; the register_method()
//                     call below recomputes SharedRuntime::is_wide_vector()
//                     from C->max_vector_size() instead -- confirm intent.
void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    // Debugging aid: flag the compile as failed so the code is never executed.
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp("install_code", &timers[_t_registerMethod]);

    // NOTE(review): a few original lines are elided from this excerpt here;
    // the closing brace after OSR_Entry below presumably matches a branch
    // opened in the elided span -- verify against the full file.
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
      // Verified entry point follows the first block; OSR entry offset is 0 here.
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }

    // Register with the runtime: builds the nmethod from the code buffer,
    // entry offsets, oop maps, and exception/implicit-null tables. The final
    // argument forwards should_install_code(), so registration can proceed
    // even when actual installation is suppressed.
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              C->has_clinit_barriers(),
                              C->for_preload(),
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0,
                              C->should_install_code());

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
    // Compiles done for preload carry class-initialization barriers; unless
    // this compile was explicitly requested as a preload precompilation,
    // record a retriable failure so a barrier-free version gets built.
    if (C->has_clinit_barriers()) {
      assert(C->for_preload(), "sanity");
      // Build second version of code without class initialization barriers
      if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
        // don't automatically precompile a barrier-free version unless explicitly asked
      } else {
        C->record_failure(C2Compiler::retry_no_clinit_barriers());
      }
    }
  }
}
3479 void PhaseOutput::install_stub(const char* stub_name) {
3480 // Entry point will be accessed using stub_entry_point();
3481 if (code_buffer() == nullptr) {
3482 Matcher::soft_match_failure();
3483 } else {
3484 if (PrintAssembly && (WizardMode || Verbose))
3485 tty->print_cr("### Stub::%s", stub_name);
3486
3487 if (!C->failing()) {
3488 assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");
3489
3490 // Make the NMethod
3491 // For now we mark the frame as never safe for profile stackwalking
3492 RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
3493 code_buffer(),
3494 CodeOffsets::frame_never_safe,
3495 // _code_offsets.value(CodeOffsets::Frame_Complete),
3496 frame_size_in_words(),
|