CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code())
{
  _call = _method->call_wrapper_at(iter->addr());
  address ic_call = _call->instruction_address();

  CompiledMethod* nm = iter->code();
  assert(ic_call != nullptr, "ic_call address must be set");
  assert(nm != nullptr, "must pass compiled method");
  assert(nm->contains(ic_call), "must be in compiled method");

  initialize_from_iter(iter);
}

// This function may fail for two reasons: either due to running out of vtable
// stubs, or due to running out of IC stubs in an attempted transition to a
// transitional state. The needs_ic_stub_refill value will be set if the failure
// was due to running out of IC stubs, in which case the caller will refill IC
// stubs and retry.
bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
                                    bool& needs_ic_stub_refill, bool caller_is_c1, TRAPS) {
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
  assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
  assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");

  address entry;
  if (call_info->call_kind() == CallInfo::itable_call) {
    assert(bytecode == Bytecodes::_invokeinterface, "");
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index, caller_is_c1);
    if (entry == nullptr) {
      return false;
    }
#ifdef ASSERT
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
    CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
                                                    call_info->resolved_klass(), false);
    holder->claim();
    if (!InlineCacheBuffer::create_transition_stub(this, holder, entry)) {
      delete holder;
      needs_ic_stub_refill = true;
      return false;
    }
    // LSan appears unable to follow malloc-based memory consistently when embedded as an immediate
    // in generated machine code. So we have to ignore it.
    LSAN_IGNORE_OBJECT(holder);
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
    // Can be different from selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index, caller_is_c1);
    if (entry == nullptr) {
      return false;
    }
    if (!InlineCacheBuffer::create_transition_stub(this, nullptr, entry)) {
      needs_ic_stub_refill = true;
      return false;
    }
  }

  if (TraceICs) {
    ResourceMark rm;
    assert(call_info->selected_method() != nullptr, "Unexpected null selected method");
    tty->print_cr("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                  p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
  }

  // We can't check this anymore. With lazy deopt we could have already
  // cleaned this IC entry before we even return. This is possible if
  // we ran out of space in the inline cache buffer trying to do the
  // set_next and we safepointed to free up space. This is a benign
  // race because the IC entry was complete when we safepointed so
  // cleaning it immediately is harmless.
  // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
  return true;
}
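
// A hedged sketch of the refill-and-retry protocol described above, seen from
// the caller's side. It is modeled on the loop in
// SharedRuntime::handle_ic_miss_helper; the helper name and its parameters are
// illustrative and not part of this file.
#if 0  // illustrative sketch only, not compiled
static void example_retry_megamorphic(CompiledIC* ic, CallInfo* call_info,
                                      Bytecodes::Code bc, bool caller_is_c1, TRAPS) {
  for (;;) {
    ICRefillVerifier refill_verifier;  // checks that a requested refill actually happens
    bool needs_ic_stub_refill = false;
    bool successful = ic->set_to_megamorphic(call_info, bc, needs_ic_stub_refill, caller_is_c1, CHECK);
    if (successful || !needs_ic_stub_refill) {
      return;  // installed, or out of vtable stubs (give up; retry on a later IC miss)
    }
    InlineCacheBuffer::refill_ic_stubs();  // out of IC stubs: free some and loop
  }
}
#endif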


// is_optimized: the compiler has generated an optimized call (i.e. a fixed call, no inline cache).
// static_bound: the call can be statically bound. If it isn't also optimized, the property
// wasn't provable at compile time. An optimized call will have any necessary
// null check, while a static_bound call won't. A static_bound (but not optimized) call must
// therefore use the unverified entry point.
void CompiledIC::compute_monomorphic_entry(const methodHandle& method,
                                           Klass* receiver_klass,
                                           bool is_optimized,
                                           bool static_bound,
                                           bool caller_is_nmethod,
                                           bool caller_is_c1,
                                           CompiledICInfo& info,
                                           TRAPS) {
  CompiledMethod* method_code = method->code();

  address entry = nullptr;
  if (method_code != nullptr && method_code->is_in_use() && !method_code->is_unloading()) {
    assert(method_code->is_compiled(), "must be compiled");
    // Call to compiled code
    //
    // Note: the following problem exists with Compiler1:
    // - at compile time we may or may not know if the destination is final
    // - if we know that the destination is final (is_optimized), we will emit
    //   an optimized virtual call (no inline cache), and need a Method* to make
    //   a call to the interpreter
    // - if we don't know if the destination is final, we emit a standard
    //   virtual call, and use CompiledICHolder to call interpreted code
    //   (no static call stub has been generated)
    // - if we notice here that the call is static bound, we convert it into
    //   what looks to be an optimized virtual call, but we must use the
    //   unverified entry point (since there will be no null check on a call
    //   when the target isn't loaded). This causes problems when verifying
    //   the IC because it looks vanilla but is optimized. Code in
    //   is_call_to_interpreted is aware of this and weakens its asserts.
    if (is_optimized) {
      entry = caller_is_c1 ? method_code->verified_inline_entry_point() : method_code->verified_entry_point();
    } else {
      entry = caller_is_c1 ? method_code->inline_entry_point() : method_code->entry_point();
    }
  }
  if (entry != nullptr) {
    // Call to near compiled code.
    info.set_compiled_entry(entry, is_optimized ? nullptr : receiver_klass, is_optimized);
  } else {
    if (is_optimized) {
      // Use stub entry
      address stub_entry = caller_is_c1 ? method()->get_c2i_inline_entry() : method()->get_c2i_entry();
      info.set_interpreter_entry(stub_entry, method());
    } else {
      // Use icholder entry
      assert(method_code == nullptr || method_code->is_compiled(), "must be compiled");
      CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
      entry = caller_is_c1 ? method()->get_c2i_unverified_inline_entry() : method()->get_c2i_unverified_entry();
      info.set_icholder_entry(entry, holder);
    }
  }
  assert(info.is_optimized() == is_optimized, "must agree");
}
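
// A hedged sketch of how the settings computed above are consumed, modeled on
// the virtual-call path in SharedRuntime::resolve_sub_helper_internal. The
// helper name and its parameters are illustrative; CompiledIC_before is the
// usual accessor for the inline cache at a caller return address.
#if 0  // illustrative sketch only, not compiled
static void example_set_monomorphic(const methodHandle& callee_method, Klass* receiver_klass,
                                    bool is_optimized, bool static_bound,
                                    CompiledMethod* caller_nm, address caller_pc, TRAPS) {
  CompiledICInfo virtual_call_info;
  CompiledIC::compute_monomorphic_entry(callee_method, receiver_klass, is_optimized, static_bound,
                                        caller_nm->is_nmethod(), caller_nm->is_compiled_by_c1(),
                                        virtual_call_info, CHECK);
  CompiledIC* inline_cache = CompiledIC_before(caller_nm, caller_pc);
  if (inline_cache->is_clean()) {
    // set_to_monomorphic() may fail if IC stubs ran out; the real caller
    // refills IC stubs and retries, as in the megamorphic case above.
    inline_cache->set_to_monomorphic(virtual_call_info);
  }
}
#endif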


bool CompiledIC::is_icholder_entry(address entry) {
  CodeBlob* cb = CodeCache::find_blob(entry);
  if (cb == nullptr) {
    return false;
  }
  if (cb->is_adapter_blob()) {
    return true;
  } else if (cb->is_vtable_blob()) {
    return VtableStubs::is_icholder_entry(entry);
  }
  return false;
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm) {
  // ... (body elided in this excerpt) ...
}

void CompiledStaticCall::set(const StaticCallInfo& info) {
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Updating a cache to the wrong entry can cause bugs that are very hard
  // to track down. If a cache entry becomes invalid, we just clean it. This
  // way, the same code path is always responsible for updating and
  // resolving an inline cache.
  assert(is_clean(), "do not update a call entry - use clean");

  if (info._to_interpreter) {
    // Call to interpreted code
    set_to_interpreted(info.callee(), info.entry());
  } else {
    set_to_compiled(info.entry());
  }
}

// Compute settings for a CompiledStaticCall. Since we might have to set
// the stub when calling to the interpreter, we return the computed settings
// through info rather than applying them directly.
void CompiledStaticCall::compute_entry(const methodHandle& m, CompiledMethod* caller_nm, StaticCallInfo& info) {
  assert(!m->mismatch(), "Mismatch for static call");
  bool caller_is_nmethod = caller_nm->is_nmethod();
  CompiledMethod* m_code = m->code();
  info._callee = m;
  if (m_code != nullptr && m_code->is_in_use() && !m_code->is_unloading()) {
    info._to_interpreter = false;
    if (caller_nm->is_compiled_by_c1()) {
      info._entry = m_code->verified_inline_entry_point();
    } else {
      info._entry = m_code->verified_entry_point();
    }
  } else {
    // Callee is interpreted code. In any case entering the interpreter
    // puts a converter-frame on the stack to save arguments.
    assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
    info._to_interpreter = true;
    if (caller_nm->is_compiled_by_c1()) {
      // C1 -> interp: values passed as oops
      info._entry = m()->get_c2i_inline_entry();
    } else {
      // C2 -> interp: values passed as fields
      info._entry = m()->get_c2i_entry();
    }
  }
}
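
// A hedged usage sketch for compute_entry() together with set() above, modeled
// on the static-call path in SharedRuntime::resolve_sub_helper_internal: the
// settings are computed first and installed only on a clean entry. The helper
// name and its parameters are illustrative.
#if 0  // illustrative sketch only, not compiled
static void example_set_static_call(const methodHandle& callee_method,
                                    CompiledMethod* caller_nm, address caller_pc) {
  StaticCallInfo static_call_info;
  CompiledStaticCall::compute_entry(callee_method, caller_nm, static_call_info);
  CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(caller_pc);
  if (ssc->is_clean()) {
    ssc->set(static_call_info);  // never overwrite a resolved entry; clean it first instead
  }
}
#endif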

void CompiledStaticCall::compute_entry_for_continuation_entry(const methodHandle& m, StaticCallInfo& info) {
  if (ContinuationEntry::is_interpreted_call(instruction_address())) {
    info._to_interpreter = true;
    info._entry = m()->get_c2i_entry();
  }
}

address CompiledDirectStaticCall::find_stub_for(address instruction) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)nullptr, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation