  masm.bind(fakeL);
  if (is_branch) {
    n->as_MachBranch()->save_label(&saveL, &save_bnum);
    n->as_MachBranch()->label_set(&fakeL, 0);
  }
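  // Emit the node into the scratch buffer. The generated code is discarded;
  // only the number of bytes emitted matters here.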
  n->emit(&masm, C->regalloc());

  // Emitting into the scratch buffer should not fail
  assert(!C->failing_internal() || C->failure_is_artificial(), "Must not have pending failure. Reason is: %s", C->failure_reason());

  if (is_branch) // Restore label.
    n->as_MachBranch()->label_set(saveL, save_bnum);

  // End scratch_emit_size section.
  set_in_scratch_emit_size(false);

  return buf.insts_size();
}

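// Install the result of this compilation: runtime stubs are handed to
// install_stub(), normal (and OSR) method compilations to install_code().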
void PhaseOutput::install() {
  if (!C->should_install_code()) {
    return;
  } else if (C->stub_function() != nullptr) {
    install_stub(C->stub_name());
  } else {
    install_code(C->method(),
                 C->entry_bci(),
                 CompileBroker::compiler2(),
                 C->has_unsafe_access(),
                 SharedRuntime::is_wide_vector(C->max_vector_size()));
  }
}

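// Register a finished method compilation with the VM: record the entry-point
// offsets, then hand the code and its metadata tables to ciEnv.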
void PhaseOutput::install_code(ciMethod* target,
                               int entry_bci,
                               AbstractCompiler* compiler,
                               bool has_unsafe_access,
                               bool has_wide_vectors) {
  // Check if we want to skip execution of all compiled code.
  {
#ifndef PRODUCT
    if (OptoNoExecute) {
      C->record_method_not_compilable("+OptoNoExecute"); // Flag as failed
      return;
    }
#endif
    Compile::TracePhase tp(_t_registerMethod);

    if (C->is_osr_compilation()) {
      _code_offsets.set_value(CodeOffsets::Verified_Entry, 0);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, _first_block_size);
    } else {
      if (!target->is_static()) {
        // The UEP of an nmethod ensures that the VEP is padded. However, the padding of the UEP is placed
        // before the inline cache check, so we don't have to execute any nop instructions when dispatching
        // through the UEP, yet we can ensure that the VEP is aligned appropriately.
        _code_offsets.set_value(CodeOffsets::Entry, _first_block_size - MacroAssembler::ic_check_size());
      }
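      // Resulting entry-point layout for a non-static method (offsets are
      // from the start of the code; UEP = unverified, VEP = verified entry):
      //
      //   CodeOffsets::Entry          = _first_block_size - ic_check_size()  (UEP)
      //   CodeOffsets::Verified_Entry = _first_block_size                    (VEP)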
      _code_offsets.set_value(CodeOffsets::Verified_Entry, _first_block_size);
      _code_offsets.set_value(CodeOffsets::OSR_Entry, 0);
    }

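    // Hand the finished code buffer and its metadata (code offsets, oop maps,
    // exception handler and implicit-exception tables) to the CI environment,
    // which creates the nmethod and installs it in the code cache.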
    C->env()->register_method(target,
                              entry_bci,
                              &_code_offsets,
                              _orig_pc_slot_offset_in_bytes,
                              code_buffer(),
                              frame_size_in_words(),
                              oop_map_set(),
                              &_handler_table,
                              inc_table(),
                              compiler,
                              has_unsafe_access,
                              SharedRuntime::is_wide_vector(C->max_vector_size()),
                              C->has_monitors(),
                              C->has_scoped_access(),
                              0);

    if (C->log() != nullptr) { // Print code cache state into compiler log
      C->log()->code_cache_state();
    }
  }
}
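
// Install a C2 runtime stub: wrap the finished code buffer in a RuntimeStub
// whose entry point is later retrieved via stub_entry_point().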
void PhaseOutput::install_stub(const char* stub_name) {
  // Entry point will be accessed using stub_entry_point();
  if (code_buffer() == nullptr) {
    Matcher::soft_match_failure();
  } else {
    if (PrintAssembly && (WizardMode || Verbose))
      tty->print_cr("### Stub::%s", stub_name);

    if (!C->failing()) {
      assert(C->fixed_slots() == 0, "no fixed slots used for runtime stubs");

      // Make the RuntimeStub.
      // For now we mark the frame as never safe for profile stackwalking.
      RuntimeStub *rs = RuntimeStub::new_runtime_stub(stub_name,
                                                      code_buffer(),
                                                      CodeOffsets::frame_never_safe,
                                                      // _code_offsets.value(CodeOffsets::Frame_Complete),
                                                      frame_size_in_words(),