src/hotspot/share/code/compiledIC.cpp

   1 /*
   2  * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


 227 CompiledIC::CompiledIC(RelocIterator* iter)
 228   : _method(iter->code())
 229 {
 230   _call = _method->call_wrapper_at(iter->addr());
 231   address ic_call = _call->instruction_address();
 232 
 233   CompiledMethod* nm = iter->code();
 234   assert(ic_call != NULL, "ic_call address must be set");
 235   assert(nm != NULL, "must pass compiled method");
 236   assert(nm->contains(ic_call), "must be in compiled method");
 237 
 238   initialize_from_iter(iter);
 239 }
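
For context (not part of this change), a minimal sketch of how this iterator-based constructor is reached in practice: walk a CompiledMethod's relocations and wrap each virtual call site in a CompiledIC via the CompiledIC_at(RelocIterator*) factory defined elsewhere in this file. The locker and printing details are illustrative assumptions.

    // Sketch: enumerate the inline caches of one compiled method.
    void print_inline_caches(CompiledMethod* cm) {
      ResourceMark rm;                       // CompiledIC objects are resource-allocated
      CompiledICLocker ml(cm);               // IC inspection requires the IC locker
      RelocIterator iter(cm);
      while (iter.next()) {
        if (iter.type() == relocInfo::virtual_call_type ||
            iter.type() == relocInfo::opt_virtual_call_type) {
          CompiledIC* ic = CompiledIC_at(&iter);   // lands in the constructor above
          tty->print_cr("IC@" INTPTR_FORMAT " clean=%d",
                        p2i(ic->instruction_address()), ic->is_clean());
        }
      }
    }
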
 240 
 241 // This function may fail for two reasons: either due to running out of vtable
 242 // stubs, or due to running out of IC stubs in an attempted transition to a
 243 // transitional state. The needs_ic_stub_refill value will be set if the failure
 244 // was due to running out of IC stubs, in which case the caller will refill IC
 245 // stubs and retry.
 246 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
 247                                     bool& needs_ic_stub_refill, TRAPS) {
 248   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
 249   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
 250   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 251 
 252   address entry;
 253   if (call_info->call_kind() == CallInfo::itable_call) {
 254     assert(bytecode == Bytecodes::_invokeinterface, "");
 255     int itable_index = call_info->itable_index();
 256     entry = VtableStubs::find_itable_stub(itable_index);
 257     if (entry == NULL) {
 258       return false;
 259     }
 260 #ifdef ASSERT
 261     int index = call_info->resolved_method()->itable_index();
 262     assert(index == itable_index, "CallInfo pre-computes this");
 263     InstanceKlass* k = call_info->resolved_method()->method_holder();
 264     assert(k->verify_itable_index(itable_index), "sanity check");
 265 #endif //ASSERT
 266     CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
 267                                                     call_info->resolved_klass(), false);
 268     holder->claim();
 269     if (!InlineCacheBuffer::create_transition_stub(this, holder, entry)) {
 270       delete holder;
 271       needs_ic_stub_refill = true;
 272       return false;
 273     }
 274   } else {
 275     assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
 276     // Can be different than selected_method->vtable_index(), due to package-private etc.
 277     int vtable_index = call_info->vtable_index();
 278     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
 279     entry = VtableStubs::find_vtable_stub(vtable_index);
 280     if (entry == NULL) {
 281       return false;
 282     }
 283     if (!InlineCacheBuffer::create_transition_stub(this, NULL, entry)) {
 284       needs_ic_stub_refill = true;
 285       return false;
 286     }
 287   }
 288 
 289   if (TraceICs) {
 290     ResourceMark rm;
 291     assert(!call_info->selected_method().is_null(), "Unexpected null selected method");
 292     tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
 293                    p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
 294   }
 295 
 296   // We can't check this anymore. With lazy deopt we could have already
 297   // cleaned this IC entry before we even return. This is possible if
 298   // we ran out of space in the inline cache buffer trying to do the
 299   // set_next and we safepointed to free up space. This is a benign


 494   // cleaned this IC entry before we even return. This is possible if
 495   // we ran out of space in the inline cache buffer trying to do the
 496   // set_next and we safepointed to free up space. This is a benign
 497   // race because the IC entry was complete when we safepointed so
 498   // cleaning it immediately is harmless.
 499   // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 500   return true;
 501 }
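
The needs_ic_stub_refill contract documented above set_to_megamorphic (lines 241-245) implies a refill-and-retry loop on the caller's side. A minimal sketch of that loop follows; the refill hook name and the exact call shape are assumptions modeled on the IC-buffer machinery, not part of this change.

    // Sketch: retry a megamorphic transition that failed only because the
    // inline-cache stub buffer was exhausted.
    bool transition_to_megamorphic(CompiledMethod* caller_nm, CompiledIC* ic,
                                   CallInfo* call_info, Bytecodes::Code bc, TRAPS) {
      for (;;) {
        bool needs_ic_stub_refill = false;
        bool successful;
        {
          CompiledICLocker ml(caller_nm);    // IC transitions require the IC locker
          successful = ic->set_to_megamorphic(call_info, bc, needs_ic_stub_refill, CHECK_false);
        }
        if (successful) {
          return true;
        }
        if (!needs_ic_stub_refill) {
          return false;                      // out of vtable/itable stubs; not retryable here
        }
        InlineCacheBuffer::refill_ic_stubs(); // assumed refill hook; may safepoint
      }
    }
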
 502 
 503 
 504 // is_optimized: Compiler has generated an optimized call (i.e. fixed, no inline cache)
 505 // static_bound: The call can be static bound. If it isn't also optimized, the property
 506 // wasn't provable at time of compilation. An optimized call will have any necessary
 507 // null check, while a static_bound won't. A static_bound (but not optimized) must
 508 // therefore use the unverified entry point.
 509 void CompiledIC::compute_monomorphic_entry(const methodHandle& method,
 510                                            Klass* receiver_klass,
 511                                            bool is_optimized,
 512                                            bool static_bound,
 513                                            bool caller_is_nmethod,
 514                                            CompiledICInfo& info,
 515                                            TRAPS) {
 516   CompiledMethod* method_code = method->code();
 517 
 518   address entry = NULL;
 519   if (method_code != NULL && method_code->is_in_use()) {
 520     assert(method_code->is_compiled(), "must be compiled");
 521     // Call to compiled code
 522     //
 523     // Note: the following problem exists with Compiler1:
 524     //   - at compile time we may or may not know if the destination is final
 525     //   - if we know that the destination is final (is_optimized), we will emit
 526     //     an optimized virtual call (no inline cache), and need a Method* to make
 527     //     a call to the interpreter
 528     //   - if we don't know if the destination is final, we emit a standard
 529     //     virtual call, and use CompiledICHolder to call interpreted code
 530     //     (no static call stub has been generated)
 531     //   - In the case that we here notice the call is static bound we
 532     //     convert the call into what looks to be an optimized virtual call,
 533     //     but we must use the unverified entry point (since there will be no
 534     //     null check on a call when the target isn't loaded).
 535     //     This causes problems when verifying the IC because
 536     //     it looks vanilla but is optimized. Code in is_call_to_interpreted
 537     //     is aware of this and weakens its asserts.
 538     if (is_optimized) {
 539       entry      = method_code->verified_entry_point();
 540     } else {
 541       entry      = method_code->entry_point();
 542     }
 543   }
 544   bool far_c2a = entry != NULL && caller_is_nmethod && method_code->is_far_code();
 545   if (entry != NULL && !far_c2a) {
 546     // Call to near compiled code (nmethod or aot).
 547     info.set_compiled_entry(entry, is_optimized ? NULL : receiver_klass, is_optimized);
 548   } else {
 549     if (is_optimized) {
 550       if (far_c2a) {
 551         // Call to aot code from nmethod.
 552         info.set_aot_entry(entry, method());
 553       } else {
 554         // Use stub entry
 555         info.set_interpreter_entry(method()->get_c2i_entry(), method());
 556       }
 557     } else {
 558       // Use icholder entry
 559       assert(method_code == NULL || method_code->is_compiled(), "must be compiled");
 560       CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
 561       info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
 562     }
 563   }
 564   assert(info.is_optimized() == is_optimized, "must agree");
 565 }
 566 
 567 
 568 bool CompiledIC::is_icholder_entry(address entry) {
 569   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
 570   if (cb != NULL && cb->is_adapter_blob()) {
 571     return true;
 572   }
 573   // itable stubs also use CompiledICHolder
 574   if (cb != NULL && cb->is_vtable_blob()) {
 575     VtableStub* s = VtableStubs::entry_point(entry);
 576     return (s != NULL) && s->is_itable_stub();
 577   }
 578 
 579   return false;
 580 }
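
is_icholder_entry() is what lets IC cleanup decide how to treat a cached value: only c2i adapter entries and itable stubs cache a CompiledICHolder, everything else caches a Method* or Klass* owned elsewhere. A hedged sketch of that consumer side (the release hook is assumed from the IC-buffer code):

    // Sketch: release the cached value of an IC that is being cleaned.
    void release_cached_value(CompiledIC* ic) {
      if (ic->is_icholder_call()) {                    // ultimately decided by is_icholder_entry()
        CompiledICHolder* holder = ic->cached_icholder();
        InlineCacheBuffer::queue_for_release(holder);  // deferred: stale callers may still be running
      }
      // otherwise the cached metadata is owned elsewhere; nothing to release
    }
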
 581 


 640   // to track down - if cache entry gets invalid - we just clean it. In
 641   // this way it is always the same code path that is responsible for
 642   // updating and resolving an inline cache
 643   assert(is_clean(), "do not update a call entry - use clean");
 644 
 645   if (info._to_interpreter) {
 646     // Call to interpreted code
 647     set_to_interpreted(info.callee(), info.entry());
 648 #if INCLUDE_AOT
 649   } else if (info._to_aot) {
 650     // Call to far code
 651     set_to_far(info.callee(), info.entry());
 652 #endif
 653   } else {
 654     set_to_compiled(info.entry());
 655   }
 656 }
 657 
 658 // Compute settings for a CompiledStaticCall. Since we might have to set
 659 // the stub when calling to the interpreter, we need to return arguments.
 660 void CompiledStaticCall::compute_entry(const methodHandle& m, bool caller_is_nmethod, StaticCallInfo& info) {
 661   CompiledMethod* m_code = m->code();
 662   info._callee = m;
 663   if (m_code != NULL && m_code->is_in_use()) {
 664     if (caller_is_nmethod && m_code->is_far_code()) {
 665       // Call to far aot code from nmethod.
 666       info._to_aot = true;
 667     } else {
 668       info._to_aot = false;
 669     }
 670     info._to_interpreter = false;
 671     info._entry  = m_code->verified_entry_point();
 672   } else {
 673     // Callee is interpreted code.  In any case entering the interpreter
 674     // puts a converter-frame on the stack to save arguments.
 675     assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
 676     info._to_interpreter = true;
 677     info._entry      = m()->get_c2i_entry();
 678   }
 679 }
 680 
 681 address CompiledDirectStaticCall::find_stub_for(address instruction, bool is_aot) {
 682   // Find reloc. information containing this call-site
 683   RelocIterator iter((nmethod*)NULL, instruction);
 684   while (iter.next()) {
 685     if (iter.addr() == instruction) {
 686       switch(iter.type()) {
 687         case relocInfo::static_call_type:
 688           return iter.static_call_reloc()->static_stub(is_aot);
 689         // We check here for opt_virtual_call_type, since we reuse the code
 690         // from the CompiledIC implementation
 691         case relocInfo::opt_virtual_call_type:
 692           return iter.opt_virtual_call_reloc()->static_stub(is_aot);
 693         case relocInfo::poll_type:
 694         case relocInfo::poll_return_type: // A safepoint can't overlap a call.
 695         default:
 696           ShouldNotReachHere();
 697       }


   1 /*
   2  * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


 227 CompiledIC::CompiledIC(RelocIterator* iter)
 228   : _method(iter->code())
 229 {
 230   _call = _method->call_wrapper_at(iter->addr());
 231   address ic_call = _call->instruction_address();
 232 
 233   CompiledMethod* nm = iter->code();
 234   assert(ic_call != NULL, "ic_call address must be set");
 235   assert(nm != NULL, "must pass compiled method");
 236   assert(nm->contains(ic_call), "must be in compiled method");
 237 
 238   initialize_from_iter(iter);
 239 }
 240 
 241 // This function may fail for two reasons: either due to running out of vtable
 242 // stubs, or due to running out of IC stubs in an attempted transition to a
 243 // transitional state. The needs_ic_stub_refill value will be set if the failure
 244 // was due to running out of IC stubs, in which case the caller will refill IC
 245 // stubs and retry.
 246 bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
 247                                     bool& needs_ic_stub_refill, bool caller_is_c1, TRAPS) {
 248   assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
 249   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
 250   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
 251 
 252   address entry;
 253   if (call_info->call_kind() == CallInfo::itable_call) {
 254     assert(bytecode == Bytecodes::_invokeinterface, "");
 255     int itable_index = call_info->itable_index();
 256     entry = VtableStubs::find_itable_stub(itable_index, caller_is_c1);
 257     if (entry == NULL) {
 258       return false;
 259     }
 260 #ifdef ASSERT
 261     int index = call_info->resolved_method()->itable_index();
 262     assert(index == itable_index, "CallInfo pre-computes this");
 263     InstanceKlass* k = call_info->resolved_method()->method_holder();
 264     assert(k->verify_itable_index(itable_index), "sanity check");
 265 #endif //ASSERT
 266     CompiledICHolder* holder = new CompiledICHolder(call_info->resolved_method()->method_holder(),
 267                                                     call_info->resolved_klass(), false);
 268     holder->claim();
 269     if (!InlineCacheBuffer::create_transition_stub(this, holder, entry)) {
 270       delete holder;
 271       needs_ic_stub_refill = true;
 272       return false;
 273     }
 274   } else {
 275     assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
 276     // Can be different than selected_method->vtable_index(), due to package-private etc.
 277     int vtable_index = call_info->vtable_index();
 278     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
 279     entry = VtableStubs::find_vtable_stub(vtable_index, caller_is_c1);
 280     if (entry == NULL) {
 281       return false;
 282     }
 283     if (!InlineCacheBuffer::create_transition_stub(this, NULL, entry)) {
 284       needs_ic_stub_refill = true;
 285       return false;
 286     }
 287   }
 288 
 289   if (TraceICs) {
 290     ResourceMark rm;
 291     assert(!call_info->selected_method().is_null(), "Unexpected null selected method");
 292     tty->print_cr ("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
 293                    p2i(instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));
 294   }
 295 
 296   // We can't check this anymore. With lazy deopt we could have already
 297   // cleaned this IC entry before we even return. This is possible if
 298   // we ran out of space in the inline cache buffer trying to do the
 299   // set_next and we safepointed to free up space. This is a benign


 494   // cleaned this IC entry before we even return. This is possible if
 495   // we ran out of space in the inline cache buffer trying to do the
 496   // set_next and we safepointed to free up space. This is a benign
 497   // race because the IC entry was complete when we safepointed so
 498   // cleaning it immediately is harmless.
 499   // assert(is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 500   return true;
 501 }
 502 
 503 
 504 // is_optimized: Compiler has generated an optimized call (i.e. fixed, no inline cache)
 505 // static_bound: The call can be static bound. If it isn't also optimized, the property
 506 // wasn't provable at time of compilation. An optimized call will have any necessary
 507 // null check, while a static_bound won't. A static_bound (but not optimized) must
 508 // therefore use the unverified entry point.
 509 void CompiledIC::compute_monomorphic_entry(const methodHandle& method,
 510                                            Klass* receiver_klass,
 511                                            bool is_optimized,
 512                                            bool static_bound,
 513                                            bool caller_is_nmethod,
 514                                            bool caller_is_c1,
 515                                            CompiledICInfo& info,
 516                                            TRAPS) {
 517   CompiledMethod* method_code = method->code();
 518 
 519   address entry = NULL;
 520   if (method_code != NULL && method_code->is_in_use()) {
 521     assert(method_code->is_compiled(), "must be compiled");
 522     // Call to compiled code
 523     //
 524     // Note: the following problem exists with Compiler1:
 525     //   - at compile time we may or may not know if the destination is final
 526     //   - if we know that the destination is final (is_optimized), we will emit
 527     //     an optimized virtual call (no inline cache), and need a Method* to make
 528     //     a call to the interpreter
 529     //   - if we don't know if the destination is final, we emit a standard
 530     //     virtual call, and use CompiledICHolder to call interpreted code
 531     //     (no static call stub has been generated)
 532     //   - In the case that we here notice the call is static bound we
 533     //     convert the call into what looks to be an optimized virtual call,
 534     //     but we must use the unverified entry point (since there will be no
 535     //     null check on a call when the target isn't loaded).
 536     //     This causes problems when verifying the IC because
 537     //     it looks vanilla but is optimized. Code in is_call_to_interpreted
 538     //     is aware of this and weakens its asserts.
 539     if (is_optimized) {
 540       entry      = caller_is_c1 ? method_code->verified_value_entry_point() : method_code->verified_entry_point();
 541     } else {
 542       entry      = caller_is_c1 ? method_code->value_entry_point() : method_code->entry_point();
 543     }
 544   }
 545   bool far_c2a = entry != NULL && caller_is_nmethod && method_code->is_far_code();
 546   if (entry != NULL && !far_c2a) {
 547     // Call to near compiled code (nmethod or aot).
 548     info.set_compiled_entry(entry, is_optimized ? NULL : receiver_klass, is_optimized);
 549   } else {
 550     if (is_optimized) {
 551       if (far_c2a) {
 552         // Call to aot code from nmethod.
 553         info.set_aot_entry(entry, method());
 554       } else {
 555         // Use stub entry
 556         address entry = caller_is_c1 ? method()->get_c2i_value_entry() : method()->get_c2i_entry();
 557         info.set_interpreter_entry(entry, method());
 558       }
 559     } else {
 560       // Use icholder entry
 561       assert(method_code == NULL || method_code->is_compiled(), "must be compiled");
 562       CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
 563       entry = caller_is_c1 ? method()->get_c2i_unverified_value_entry() : method()->get_c2i_unverified_entry();
 564       info.set_icholder_entry(entry, holder);
 565     }
 566   }
 567   assert(info.is_optimized() == is_optimized, "must agree");
 568 }
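
To see how the new caller_is_c1 argument is meant to be used, here is a sketch of a resolver consuming this function: compute a CompiledICInfo with the caller's compiler kind, then apply it to the clean inline cache. Names such as CompiledIC_before and the refill return convention of set_to_monomorphic follow the existing IC machinery; the exact call shape is illustrative, not part of this change.

    // Sketch: monomorphic (re)binding of a virtual call site.
    bool rebind_monomorphic(CompiledMethod* caller_nm, address call_pc,
                            const methodHandle& callee, Klass* receiver_klass,
                            bool is_optimized, bool static_bound, TRAPS) {
      bool caller_is_nmethod = caller_nm->is_nmethod();
      bool caller_is_c1      = caller_nm->is_compiled_by_c1();  // selects the value entry points

      CompiledICInfo info;
      CompiledIC::compute_monomorphic_entry(callee, receiver_klass,
                                            is_optimized, static_bound,
                                            caller_is_nmethod, caller_is_c1,
                                            info, CHECK_false);

      ResourceMark rm;
      CompiledICLocker ml(caller_nm);
      CompiledIC* ic = CompiledIC_before(caller_nm, call_pc);
      if (!ic->is_clean()) {
        return true;                         // already resolved by another thread
      }
      return ic->set_to_monomorphic(info);   // false => refill IC stubs and retry
    }
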
 569 
 570 
 571 bool CompiledIC::is_icholder_entry(address entry) {
 572   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
 573   if (cb != NULL && cb->is_adapter_blob()) {
 574     return true;
 575   }
 576   // itable stubs also use CompiledICHolder
 577   if (cb != NULL && cb->is_vtable_blob()) {
 578     VtableStub* s = VtableStubs::entry_point(entry);
 579     return (s != NULL) && s->is_itable_stub();
 580   }
 581 
 582   return false;
 583 }
 584 


 643   // to track down - if cache entry gets invalid - we just clean it. In
 644   // this way it is always the same code path that is responsible for
 645   // updating and resolving an inline cache
 646   assert(is_clean(), "do not update a call entry - use clean");
 647 
 648   if (info._to_interpreter) {
 649     // Call to interpreted code
 650     set_to_interpreted(info.callee(), info.entry());
 651 #if INCLUDE_AOT
 652   } else if (info._to_aot) {
 653     // Call to far code
 654     set_to_far(info.callee(), info.entry());
 655 #endif
 656   } else {
 657     set_to_compiled(info.entry());
 658   }
 659 }
 660 
 661 // Compute settings for a CompiledStaticCall. Since we might have to set
 662 // the stub when calling to the interpreter, we need to return arguments.
 663 void CompiledStaticCall::compute_entry(const methodHandle& m, CompiledMethod* caller_nm, StaticCallInfo& info) {
 664   bool caller_is_nmethod = caller_nm->is_nmethod();
 665   CompiledMethod* m_code = m->code();
 666   info._callee = m;
 667   if (m_code != NULL && m_code->is_in_use()) {
 668     if (caller_is_nmethod && m_code->is_far_code()) {
 669       // Call to far aot code from nmethod.
 670       info._to_aot = true;
 671     } else {
 672       info._to_aot = false;
 673     }
 674     info._to_interpreter = false;
 675     if (caller_nm->is_compiled_by_c1()) {
 676       info._entry = m_code->verified_value_entry_point();
 677     } else {
 678       info._entry = m_code->verified_entry_point();
 679     }
 680   } else {
 681     // Callee is interpreted code.  In any case entering the interpreter
 682     // puts a converter-frame on the stack to save arguments.
 683     assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
 684     info._to_interpreter = true;
 685 
 686     if (caller_nm->is_compiled_by_c1()) {
 687       // C1 -> interp: values passed as oops
 688       info._entry = m()->get_c2i_value_entry();
 689     } else {
 690       // C2 -> interp: values passed as fields
 691       info._entry = m()->get_c2i_entry();
 692     }
 693   }
 694 }
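
compute_entry() pairs with the set() dispatch shown earlier: the resolver fills a StaticCallInfo and then patches the direct call, with the caller's compiler kind deciding between the ordinary and the value-passing c2i entries. A sketch of that flow (helper names such as compiledStaticCall_before follow the CompiledMethod API; the exact shape is illustrative, not part of this change):

    // Sketch: resolving a direct (static or optimized-virtual) call site.
    void rebind_static_call(CompiledMethod* caller_nm, address return_pc,
                            const methodHandle& callee) {
      StaticCallInfo info;
      CompiledStaticCall::compute_entry(callee, caller_nm, info);

      ResourceMark rm;
      CompiledICLocker ml(caller_nm);
      CompiledStaticCall* ssc = caller_nm->compiledStaticCall_before(return_pc);
      if (ssc->is_clean()) {
        ssc->set(info);   // interpreter case also relies on the static stub found by find_stub_for()
      }
    }
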
 695 
 696 address CompiledDirectStaticCall::find_stub_for(address instruction, bool is_aot) {
 697   // Find reloc. information containing this call-site
 698   RelocIterator iter((nmethod*)NULL, instruction);
 699   while (iter.next()) {
 700     if (iter.addr() == instruction) {
 701       switch(iter.type()) {
 702         case relocInfo::static_call_type:
 703           return iter.static_call_reloc()->static_stub(is_aot);
 704         // We check here for opt_virtual_call_type, since we reuse the code
 705         // from the CompiledIC implementation
 706         case relocInfo::opt_virtual_call_type:
 707           return iter.opt_virtual_call_reloc()->static_stub(is_aot);
 708         case relocInfo::poll_type:
 709         case relocInfo::poll_return_type: // A safepoint can't overlap a call.
 710         default:
 711           ShouldNotReachHere();
 712       }

