src/hotspot/share/code/compiledIC.cpp

*** 244,20 ***
  // stubs, or due to running out of IC stubs in an attempted transition to a
  // transitional state. The needs_ic_stub_refill value will be set if the failure
  // was due to running out of IC stubs, in which case the caller will refill IC
  // stubs and retry.
  bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
!                                     bool& needs_ic_stub_refill, TRAPS) {
    assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
    assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
    assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
  
    address entry;
    if (call_info->call_kind() == CallInfo::itable_call) {
      assert(bytecode == Bytecodes::_invokeinterface, "");
      int itable_index = call_info->itable_index();
!     entry = VtableStubs::find_itable_stub(itable_index);
      if (entry == nullptr) {
        return false;
      }
  #ifdef ASSERT
      int index = call_info->resolved_method()->itable_index();
--- 244,20 ---
  // stubs, or due to running out of IC stubs in an attempted transition to a
  // transitional state. The needs_ic_stub_refill value will be set if the failure
  // was due to running out of IC stubs, in which case the caller will refill IC
  // stubs and retry.
  bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode,
!                                     bool& needs_ic_stub_refill, bool caller_is_c1, TRAPS) {
    assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
    assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
    assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");
  
    address entry;
    if (call_info->call_kind() == CallInfo::itable_call) {
      assert(bytecode == Bytecodes::_invokeinterface, "");
      int itable_index = call_info->itable_index();
!     entry = VtableStubs::find_itable_stub(itable_index, caller_is_c1);
      if (entry == nullptr) {
        return false;
      }
  #ifdef ASSERT
      int index = call_info->resolved_method()->itable_index();

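For context on the needs_ic_stub_refill contract described in the comment above: a caller is expected to refill IC stubs and retry when the flag comes back set. A minimal sketch of that retry loop follows, using the new signature from this hunk and the existing InlineCacheBuffer::refill_ic_stubs() and ICRefillVerifier from HotSpot; the actual logic lives in SharedRuntime's IC-miss handling and differs in detail.

    // Sketch: drive set_to_megamorphic() per its documented contract,
    // in a context that returns bool and has call_info/bc/caller_is_c1 in scope.
    bool needs_ic_stub_refill = false;
    for (;;) {
      ICRefillVerifier ic_refill_verifier;
      if (ic->set_to_megamorphic(&call_info, bc, needs_ic_stub_refill, caller_is_c1, CHECK_false)) {
        break;                                // transition installed
      }
      if (!needs_ic_stub_refill) {
        break;                                // no vtable/itable stub available; give up
      }
      InlineCacheBuffer::refill_ic_stubs();   // replenish, then retry the transition
      needs_ic_stub_refill = false;
    }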
*** 279,11 ***
    } else {
      assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
      // Can be different than selected_method->vtable_index(), due to package-private etc.
      int vtable_index = call_info->vtable_index();
      assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
!     entry = VtableStubs::find_vtable_stub(vtable_index);
      if (entry == nullptr) {
        return false;
      }
      if (!InlineCacheBuffer::create_transition_stub(this, nullptr, entry)) {
        needs_ic_stub_refill = true;
--- 279,11 ---
    } else {
      assert(call_info->call_kind() == CallInfo::vtable_call, "either itable or vtable");
      // Can be different than selected_method->vtable_index(), due to package-private etc.
      int vtable_index = call_info->vtable_index();
      assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
!     entry = VtableStubs::find_vtable_stub(vtable_index, caller_is_c1);
      if (entry == nullptr) {
        return false;
      }
      if (!InlineCacheBuffer::create_transition_stub(this, nullptr, entry)) {
        needs_ic_stub_refill = true;

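Why the lookups now take caller_is_c1: with inline types, a C1 caller passes value arguments as oops while a C2 caller passes them scalarized (see the c2i comments further down), so a vtable or itable stub generated for one convention cannot be reused by the other. A hypothetical illustration of what the stub cache must now distinguish; StubKey is illustrative only, not the actual VtableStubs structure.

    // Illustrative only: stubs are effectively keyed on the caller's
    // calling convention in addition to the dispatch-table index.
    struct StubKey {
      bool is_itable;     // itable stub vs. vtable stub
      int  index;         // itable or vtable index
      bool caller_is_c1;  // C1 passes inline types as oops; C2 scalarizes them
    };
    // Two lookups that differ only in caller_is_c1 resolve to different stubs.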
*** 507,10 ***
--- 507,11 ---
  void CompiledIC::compute_monomorphic_entry(const methodHandle& method,
                                             Klass* receiver_klass,
                                             bool is_optimized,
                                             bool static_bound,
                                             bool caller_is_nmethod,
+                                            bool caller_is_c1,
                                             CompiledICInfo& info,
                                             TRAPS) {
    CompiledMethod* method_code = method->code();
  
    address entry = nullptr;

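Call sites of compute_monomorphic_entry must now supply the extra flag, presumably derived from the calling CompiledMethod. A sketch of such a call site; caller_nm and the surrounding argument names are assumptions about the caller, not code from this patch.

    // Sketch of a call site, assuming a caller_nm of type CompiledMethod*.
    bool caller_is_c1 = caller_nm->is_compiled_by_c1();
    CompiledIC::compute_monomorphic_entry(callee_method, receiver_klass,
                                          is_optimized, static_bound,
                                          caller_is_nmethod, caller_is_c1,
                                          info, CHECK);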
*** 532,27 ***
      //     null check on a call when the target isn't loaded).
      //     This causes problems when verifying the IC because
      //     it looks vanilla but is optimized. Code in is_call_to_interpreted
      //     is aware of this and weakens its asserts.
      if (is_optimized) {
!       entry      = method_code->verified_entry_point();
      } else {
!       entry      = method_code->entry_point();
      }
    }
    if (entry != nullptr) {
      // Call to near compiled code.
      info.set_compiled_entry(entry, is_optimized ? nullptr : receiver_klass, is_optimized);
    } else {
      if (is_optimized) {
        // Use stub entry
!       info.set_interpreter_entry(method()->get_c2i_entry(), method());
      } else {
        // Use icholder entry
        assert(method_code == nullptr || method_code->is_compiled(), "must be compiled");
        CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
!       info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder);
      }
    }
    assert(info.is_optimized() == is_optimized, "must agree");
  }
  
--- 533,29 ---
      //     null check on a call when the target isn't loaded).
      //     This causes problems when verifying the IC because
      //     it looks vanilla but is optimized. Code in is_call_to_interpreted
      //     is aware of this and weakens its asserts.
      if (is_optimized) {
!       entry      = caller_is_c1 ? method_code->verified_inline_entry_point() : method_code->verified_entry_point();
      } else {
!       entry      = caller_is_c1 ? method_code->inline_entry_point() : method_code->entry_point();
      }
    }
    if (entry != nullptr) {
      // Call to near compiled code.
      info.set_compiled_entry(entry, is_optimized ? nullptr : receiver_klass, is_optimized);
    } else {
      if (is_optimized) {
        // Use stub entry
!       entry = caller_is_c1 ? method()->get_c2i_inline_entry() : method()->get_c2i_entry();
+       info.set_interpreter_entry(entry, method());
      } else {
        // Use icholder entry
        assert(method_code == nullptr || method_code->is_compiled(), "must be compiled");
        CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass);
!       entry = caller_is_c1 ? method()->get_c2i_unverified_inline_entry() : method()->get_c2i_unverified_entry();
+       info.set_icholder_entry(entry, holder);
      }
    }
    assert(info.is_optimized() == is_optimized, "must agree");
  }
  

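The entry-point selection in the hunk above is a two-axis choice: optimized vs. vanilla call, and C1 vs. C2 caller. A condensed restatement as a hypothetical helper, using only the accessors that appear in the patch:

    // Hypothetical helper; equivalent to the inline selection above.
    static address select_compiled_entry(CompiledMethod* code,
                                         bool is_optimized, bool caller_is_c1) {
      if (is_optimized) {
        return caller_is_c1 ? code->verified_inline_entry_point()
                            : code->verified_entry_point();
      }
      return caller_is_c1 ? code->inline_entry_point()
                          : code->entry_point();
    }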
*** 634,22 ***
    }
  }
  
  // Compute settings for a CompiledStaticCall. Since we might have to set
  // the stub when calling to the interpreter, we need to return arguments.
! void CompiledStaticCall::compute_entry(const methodHandle& m, bool caller_is_nmethod, StaticCallInfo& info) {
    CompiledMethod* m_code = m->code();
    info._callee = m;
    if (m_code != nullptr && m_code->is_in_use() && !m_code->is_unloading()) {
      info._to_interpreter = false;
!     info._entry  = m_code->verified_entry_point();
    } else {
      // Callee is interpreted code.  In any case entering the interpreter
      // puts a converter-frame on the stack to save arguments.
      assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
      info._to_interpreter = true;
!     info._entry      = m()->get_c2i_entry();
    }
  }
  
  void CompiledStaticCall::compute_entry_for_continuation_entry(const methodHandle& m, StaticCallInfo& info) {
    if (ContinuationEntry::is_interpreted_call(instruction_address())) {
--- 637,34 ---
    }
  }
  
  // Compute settings for a CompiledStaticCall. Since we might have to set
  // the stub when calling to the interpreter, we need to return arguments.
! void CompiledStaticCall::compute_entry(const methodHandle& m, CompiledMethod* caller_nm, StaticCallInfo& info) {
+   assert(!m->mismatch(), "Mismatch for static call");
+   bool caller_is_nmethod = caller_nm->is_nmethod();
    CompiledMethod* m_code = m->code();
    info._callee = m;
    if (m_code != nullptr && m_code->is_in_use() && !m_code->is_unloading()) {
      info._to_interpreter = false;
!     if (caller_nm->is_compiled_by_c1()) {
+       info._entry = m_code->verified_inline_entry_point();
+     } else {
+       info._entry = m_code->verified_entry_point();
+     }
    } else {
      // Callee is interpreted code.  In any case entering the interpreter
      // puts a converter-frame on the stack to save arguments.
      assert(!m->is_method_handle_intrinsic(), "Compiled code should never call interpreter MH intrinsics");
      info._to_interpreter = true;
!     if (caller_nm->is_compiled_by_c1()) {
+       // C1 -> interp: values passed as oops
+       info._entry = m()->get_c2i_inline_entry();
+     } else {
+       // C2 -> interp: values passed as fields
+       info._entry = m()->get_c2i_entry();
+     }
    }
  }
  
  void CompiledStaticCall::compute_entry_for_continuation_entry(const methodHandle& m, StaticCallInfo& info) {
    if (ContinuationEntry::is_interpreted_call(instruction_address())) {
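Taken together, the c2i adapter choices this patch makes for interpreter-bound calls are as follows; all entries appear verbatim in the hunks above.

    caller | optimized/static call    | inline-cache (unverified) call
    -------+--------------------------+----------------------------------
    C1     | get_c2i_inline_entry()   | get_c2i_unverified_inline_entry()
    C2     | get_c2i_entry()          | get_c2i_unverified_entry()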