  masm.bind(fakeL);
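  // A branch cannot be encoded without a bound target, so temporarily point
  // it at the fake label bound above; the real label is saved here and
  // restored once the instruction has been emitted.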
  if (is_branch) {
    n->as_MachBranch()->save_label(&saveL, &save_bnum);
    n->as_MachBranch()->label_set(&fakeL, 0);
  }
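  // Emit into the scratch buffer purely to measure the instruction's size.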
  n->emit(&masm, C->regalloc());

  // Emitting into the scratch buffer should not fail
  assert(!C->failing_internal() || C->failure_is_artificial(), "Must not have pending failure. Reason is: %s", C->failure_reason());

  if (is_branch) // Restore label.
    n->as_MachBranch()->label_set(saveL, save_bnum);

  // End scratch_emit_size section.
  set_in_scratch_emit_size(false);

  return buf.insts_size();
}
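
// Install the result of this compile into the runtime: stub compilations
// produce a RuntimeStub, regular method compilations an nmethod.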
void PhaseOutput::install() {
  if (!C->should_install_code()) {
    return;
  } else if (C->stub_function() != nullptr) {
    install_stub(C->stub_name());
  } else {
    install_code(C->method(),
                 C->entry_bci(),
                 CompileBroker::compiler2(),
                 C->has_unsafe_access(),
                 SharedRuntime::is_wide_vector(C->max_vector_size()));
  }
}

void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp("install_code", &timers[_t_registerMethod]);

    if (C->is_osr_compilation()) {
      _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
    } else {
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }
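
    // Hand the finished code to the runtime: ciEnv::register_method() creates
    // the nmethod and installs it in the code cache.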
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0);

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
  }
}

void PhaseOutput::install_stub(const char* stub_name) {
  // Entry point will be accessed using stub_entry_point();
  if (code_buffer() == nullptr) {
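    // Matching failed to produce code for this stub; report the match
    // failure instead of creating a RuntimeStub.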
    Matcher::soft_match_failure();
  } else {
    if (PrintAssembly && (WizardMode || Verbose))
      tty->print_cr("### Stub::%s", stub_name);

    if (!C->failing()) {
      assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");

      // Make the NMethod
      // For now we mark the frame as never safe for profile stackwalking
      RuntimeStub* rs = RuntimeStub::new_runtime_stub(stub_name,
                                                      code_buffer(),
                                                      CodeOffsets::frame_never_safe,
                                                      // _code_offsets.value(CodeOffsets::Frame_Complete),
                                                      frame_size_in_words(),

//---------------------------------------------------------------------------
// A second variant of the section above follows; it additionally supports
// class initialization barriers and preload (precompile) compilations.
//---------------------------------------------------------------------------

  masm.bind(fakeL);
  if (is_branch) {
    n->as_MachBranch()->save_label(&saveL, &save_bnum);
    n->as_MachBranch()->label_set(&fakeL, 0);
  }
  n->emit(&masm, C->regalloc());

  // Emitting into the scratch buffer should not fail
  assert(!C->failing_internal() || C->failure_is_artificial(), "Must not have pending failure. Reason is: %s", C->failure_reason());

  if (is_branch) // Restore label.
    n->as_MachBranch()->label_set(saveL, save_bnum);

  // End scratch_emit_size section.
  set_in_scratch_emit_size(false);

  return buf.insts_size();
}
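
// In this variant the decision whether to actually install the code is made
// inside ciEnv::register_method() (note the trailing C->should_install_code()
// argument passed by install_code() below), so install() has no early return.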
void PhaseOutput::install() {
  if (C->should_install_code() && C->stub_function() != nullptr) {
    install_stub(C->stub_name());
  } else {
    install_code(C->method(),
                 C->entry_bci(),
                 CompilerThread::current()->compiler(),
                 C->has_unsafe_access(),
                 SharedRuntime::is_wide_vector(C->max_vector_size()));
  }
}

void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp("install_code", &timers[_t_registerMethod]);

    if (C->is_osr_compilation()) {
      _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
    } else {
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }
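
    // As in the variant above, but register_method() additionally receives
    // the clinit-barrier and preload state, plus whether the generated code
    // should actually be installed.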
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              C->has_clinit_barriers(),
                              C->for_preload(),
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0,
                              C->should_install_code());

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
    if (C->has_clinit_barriers()) {
      assert(C->for_preload(), "sanity");
      // Build a second version of the code without class initialization barriers.
      if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
        // Don't automatically precompile a barrier-free version unless explicitly asked to.
      } else {
        C->record_failure(C2Compiler::retry_no_clinit_barriers());
      }
    }
  }
}

void PhaseOutput::install_stub(const char* stub_name) {
  // Entry point will be accessed using stub_entry_point();
  if (code_buffer() == nullptr) {
    Matcher::soft_match_failure();
  } else {
    if (PrintAssembly && (WizardMode || Verbose))
      tty->print_cr("### Stub::%s", stub_name);

    if (!C->failing()) {
      assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");

      // Make the NMethod
      // For now we mark the frame as never safe for profile stackwalking
      RuntimeStub* rs = RuntimeStub::new_runtime_stub(stub_name,
                                                      code_buffer(),
                                                      CodeOffsets::frame_never_safe,
                                                      // _code_offsets.value(CodeOffsets::Frame_Complete),
                                                      frame_size_in_words(),