  masm.bind(fakeL);
  if (is_branch) {
    n->as_MachBranch()->save_label(&saveL, &save_bnum);
    n->as_MachBranch()->label_set(&fakeL, 0);
  }
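  // Emit the node into the scratch buffer; only the size of the generated
  // code is of interest here, the bytes themselves are thrown away.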
  n->emit(&masm, C->regalloc());

  // Emitting into the scratch buffer should not fail
  assert(!C->failing_internal() || C->failure_is_artificial(), "Must not have pending failure. Reason is: %s", C->failure_reason());

  if (is_branch) // Restore label.
    n->as_MachBranch()->label_set(saveL, save_bnum);

  // End scratch_emit_size section.
  set_in_scratch_emit_size(false);

  return buf.insts_size();
}

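// Install the result of this compilation: either a runtime stub or, for a
// regular method compilation, an nmethod.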
void PhaseOutput::install() {
  if (!C->should_install_code()) {
    return;
  } else if (C->stub_function() != nullptr) {
    install_stub(C->stub_name());
  } else {
    install_code(C->method(),
                 C->entry_bci(),
                 CompileBroker::compiler2(),
                 C->has_unsafe_access(),
                 SharedRuntime::is_wide_vector(C->max_vector_size()));
  }
}

void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp(_t_registerMethod);

    if (is_osr_compilation()) {
      _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
    } else {
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }

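    // Hand the result over to the runtime: register_method() wraps the code
    // buffer and the metadata recorded above (entry offsets, oop maps,
    // handler tables) in a new nmethod and publishes it.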
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0);

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
  }
}
void PhaseOutput::install_stub(const char* stub_name) {
  // Entry point will be accessed using stub_entry_point();
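  // A null code buffer means code generation did not complete; record a soft
  // match failure rather than installing anything.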
  if (code_buffer() == nullptr) {
    Matcher::soft_match_failure();
  } else {
    if (PrintAssembly && (WizardMode || Verbose))
      tty->print_cr("### Stub::%s", stub_name);

    if (!C->failing()) {
      assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");

      // Make the NMethod
      // For now we mark the frame as never safe for profile stackwalking
      RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
                                                      code_buffer(),
                                                      CodeOffsets::frame_never_safe,
                                                      // _code_offsets.value(CodeOffsets::Frame_Complete),
                                                      frame_size_in_words(),

// ============================================================================
// Second variant of the same section, with class-initialization-barrier and
// preload handling added.
// ============================================================================

  masm.bind(fakeL);
  if (is_branch) {
    n->as_MachBranch()->save_label(&saveL, &save_bnum);
    n->as_MachBranch()->label_set(&fakeL, 0);
  }
  n->emit(&masm, C->regalloc());

  // Emitting into the scratch buffer should not fail
  assert(!C->failing_internal() || C->failure_is_artificial(), "Must not have pending failure. Reason is: %s", C->failure_reason());

  if (is_branch) // Restore label.
    n->as_MachBranch()->label_set(saveL, save_bnum);

  // End scratch_emit_size section.
  set_in_scratch_emit_size(false);

  return buf.insts_size();
}

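// In this variant install() merges the should-install check into the stub
// test; whether code is actually installed is instead passed down to
// register_method() below.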
void PhaseOutput::install() {
  if (C->should_install_code() && C->stub_function() != nullptr) {
    install_stub(C->stub_name());
  } else {
    install_code(C->method(),
                 C->entry_bci(),
                 CompilerThread::current()->compiler(),
                 C->has_unsafe_access(),
                 SharedRuntime::is_wide_vector(C->max_vector_size()));
  }
}

void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp(_t_registerMethod);

    if (is_osr_compilation()) {
      _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
    } else {
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }

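    // This register_method() overload additionally records the clinit-barrier
    // and preload state of the compilation, and is told whether the code
    // should actually be installed.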
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              C->has_clinit_barriers(),
                              C->for_preload(),
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0,
                              C->should_install_code());

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
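    // For preload compilations the code above was emitted with explicit
    // class-initialization barriers; decide here whether to also request a
    // barrier-free recompile.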
    if (C->has_clinit_barriers()) {
      assert(C->for_preload(), "sanity");
      // Build a second version of the code without class-initialization barriers.
      if (C->env()->task()->compile_reason() == CompileTask::Reason_PrecompileForPreload) {
        // Don't automatically precompile a barrier-free version unless explicitly asked to.
      } else {
        C->record_failure(C2Compiler::retry_no_clinit_barriers());
      }
    }
  }
}
void PhaseOutput::install_stub(const char* stub_name) {
  // Entry point will be accessed using stub_entry_point();
  if (code_buffer() == nullptr) {
    Matcher::soft_match_failure();
  } else {
    if (PrintAssembly && (WizardMode || Verbose))
      tty->print_cr("### Stub::%s", stub_name);

    if (!C->failing()) {
      assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");

      // Make the NMethod
      // For now we mark the frame as never safe for profile stackwalking
      RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
                                                      code_buffer(),
                                                      CodeOffsets::frame_never_safe,
                                                      // _code_offsets.value(CodeOffsets::Frame_Complete),
                                                      frame_size_in_words(),