1 /*
  2  * Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "code/codeBehaviours.hpp"
 26 #include "code/codeCache.hpp"
 27 #include "code/compiledIC.hpp"
 28 #include "code/nmethod.hpp"
 29 #include "code/vtableStubs.hpp"
 30 #include "memory/resourceArea.hpp"
 31 #include "memory/universe.hpp"
 32 #include "oops/compressedKlass.hpp"
 33 #include "oops/klass.inline.hpp"
 34 #include "oops/method.inline.hpp"
 35 #include "runtime/atomicAccess.hpp"
 36 #include "runtime/continuationEntry.hpp"
 37 #include "runtime/handles.inline.hpp"
 38 #include "runtime/interfaceSupport.inline.hpp"
 39 #include "runtime/sharedRuntime.hpp"
 40 #include "sanitizers/leak.hpp"
 41 
 42 
 43 // Every time a compiled IC is changed or its type is being accessed,
 44 // either the CompiledIC_lock must be set or we must be at a safe point.
 45 
// Acquires inline-cache protection for the given nmethod through the
// currently installed protection behaviour. _locked records whether lock()
// actually took the lock, so the destructor knows whether unlock() is owed.
// Note: the initializer order relies on _behaviour being set before _locked.
CompiledICLocker::CompiledICLocker(nmethod* method)
  : _method(method),
    _behaviour(CompiledICProtectionBehaviour::current()),
    _locked(_behaviour->lock(_method)) {
}
 51 
 52 CompiledICLocker::~CompiledICLocker() {
 53   if (_locked) {
 54     _behaviour->unlock(_method);
 55   }
 56 }
 57 
// Returns true if the current thread may safely inspect or modify the inline
// caches of nm (per the file-head rule: lock held or at a safepoint).
bool CompiledICLocker::is_safe(nmethod* nm) {
  return CompiledICProtectionBehaviour::current()->is_safe(nm);
}
 61 
 62 bool CompiledICLocker::is_safe(address code) {
 63   CodeBlob* cb = CodeCache::find_blob(code);
 64   assert(cb != nullptr && cb->is_nmethod(), "must be compiled");
 65   nmethod* nm = cb->as_nmethod();
 66   return CompiledICProtectionBehaviour::current()->is_safe(nm);
 67 }
 68 
// Value-initializes all fields (null/zero/false); the real contents are
// filled in lazily by initialize() on the call site's first resolution.
CompiledICData::CompiledICData()
  : _speculated_method(),
    _speculated_klass(),
    _itable_defc_klass(),
    _itable_refc_klass(),
    _is_initialized() {}
 75 
// Inline cache callsite info is initialized once the first time it is resolved.
// Records the speculated (selected) method and receiver klass; for itable
// calls it additionally records the interface klasses needed for dispatch.
void CompiledICData::initialize(CallInfo* call_info, Klass* receiver_klass) {
  _speculated_method = call_info->selected_method();
  // The klass is kept in compressed (narrowKlass) form, widened to uintptr_t.
  _speculated_klass = (uintptr_t)CompressedKlassPointers::encode_not_null(receiver_klass);
  if (call_info->call_kind() == CallInfo::itable_call) {
    assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
    // defc = interface declaring the resolved method, refc = resolved klass
    // at the call site; both are only set for itable call sites.
    _itable_defc_klass = call_info->resolved_method()->method_holder();
    _itable_refc_klass = call_info->resolved_klass();
  }
  // Set last: readers test is_initialized() before consuming the fields above.
  _is_initialized = true;
}
 87 
// A zero _speculated_klass on an initialized IC means clean_metadata() nuked
// it after class unloading; on an uninitialized IC zero just means "unset",
// hence the is_initialized() guard.
bool CompiledICData::is_speculated_klass_unloaded() const {
  return is_initialized() && _speculated_klass == 0;
}
 91 
// GC support: drops the speculated method/klass when the speculated klass'
// class loader is no longer alive. Nothing to do if never initialized or
// already cleaned.
void CompiledICData::clean_metadata() {
  if (!is_initialized() || is_speculated_klass_unloaded()) {
    return;
  }

  // GC cleaning doesn't need to change the state of the inline cache,
  // only nuke stale speculated metadata if it gets unloaded. If the
  // inline cache is monomorphic, the unverified entries will miss, and
  // subsequent miss handlers will upgrade the callsite to megamorphic,
  // which makes sense as it obviously is megamorphic then.
  if (!speculated_klass()->is_loader_alive()) {
    // Atomic stores: these fields may be read concurrently while the GC
    // clears them. The klass slot is cleared first; readers treat 0 as
    // "unloaded" (see is_speculated_klass_unloaded()).
    AtomicAccess::store(&_speculated_klass, (uintptr_t)0);
    AtomicAccess::store(&_speculated_method, (Method*)nullptr);
  }

  assert(_speculated_method == nullptr || _speculated_method->method_holder()->is_loader_alive(),
         "Speculated method is not unloaded despite class being unloaded");
}
110 
// Applies cl to every piece of metadata this IC data references: the
// speculated method and klass (unless the klass was unloaded), plus the
// itable refc/defc klasses when set (itable call sites only).
void CompiledICData::metadata_do(MetadataClosure* cl) {
  if (!is_initialized()) {
    return;
  }

  if (!is_speculated_klass_unloaded()) {
    cl->do_metadata(_speculated_method);
    cl->do_metadata(speculated_klass());
  }
  // _itable_refc_klass/_itable_defc_klass are null unless initialize() saw
  // an itable call.
  if (_itable_refc_klass != nullptr) {
    cl->do_metadata(_itable_refc_klass);
  }
  if (_itable_defc_klass != nullptr) {
    cl->do_metadata(_itable_defc_klass);
  }
}
127 
128 Klass* CompiledICData::speculated_klass() const {
129   if (is_speculated_klass_unloaded()) {
130     return nullptr;
131   }
132   return CompressedKlassPointers::decode_not_null((narrowKlass)_speculated_klass);
133 }
134 
135 //-----------------------------------------------------------------------------
136 // High-level access to an inline cache. Guaranteed to be MT-safe.
137 
// Accessor for this call site's out-of-line CompiledICData.
CompiledICData* CompiledIC::data() const {
  return _data;
}
141 
142 CompiledICData* data_from_reloc_iter(RelocIterator* iter) {
143   assert(iter->type() == relocInfo::virtual_call_type, "wrong reloc. info");
144 
145   virtual_call_Relocation* r = iter->virtual_call_reloc();
146   NativeMovConstReg* value = nativeMovConstReg_at(r->cached_value());
147 
148   return (CompiledICData*)value->data();
149 }
150 
// Builds a CompiledIC view over the virtual call at the iterator's current
// relocation. Must be called under CompiledICLocker protection (asserted).
CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code()),
    _data(data_from_reloc_iter(iter)),
    _call(nativeCall_at(iter->addr()))
{
  assert(_method != nullptr, "must pass compiled method");
  assert(_method->contains(iter->addr()), "must be in compiled method");
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
}
160 
161 CompiledIC* CompiledIC_before(nmethod* nm, address return_addr) {
162   address call_site = nativeCall_before(return_addr)->instruction_address();
163   return CompiledIC_at(nm, call_site);
164 }
165 
// Creates a CompiledIC for the call at call_site in nm by iterating the
// one-past range [call_site, call_site + 1) to find its relocation.
CompiledIC* CompiledIC_at(nmethod* nm, address call_site) {
  RelocIterator iter(nm, call_site, call_site + 1);
  iter.next();
  return CompiledIC_at(&iter);
}
171 
172 CompiledIC* CompiledIC_at(Relocation* call_reloc) {
173   address call_site = call_reloc->addr();
174   nmethod* nm = CodeCache::find_blob(call_reloc->addr())->as_nmethod();
175   return CompiledIC_at(nm, call_site);
176 }
177 
178 CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
179   CompiledIC* c_ic = new CompiledIC(reloc_iter);
180   c_ic->verify();
181   return c_ic;
182 }
183 
// Initializes the IC data the first time this call site is resolved; later
// calls are no-ops, so the first recorded speculation wins.
void CompiledIC::ensure_initialized(CallInfo* call_info, Klass* receiver_klass) {
  if (!_data->is_initialized()) {
    _data->initialize(call_info, receiver_klass);
  }
}
189 
// Resets the call site to the runtime resolve stub, forcing re-resolution
// on the next invocation. Patching is done MT-safely.
void CompiledIC::set_to_clean() {
  log_debug(inlinecache)("IC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
  _call->set_destination_mt_safe(SharedRuntime::get_resolve_virtual_call_stub());
}
194 
// Points the call at the speculated method: directly at its nmethod's entry
// point when usable compiled code exists, otherwise at the c2i unverified
// entry so the call falls into the interpreter.
void CompiledIC::set_to_monomorphic() {
  assert(data()->is_initialized(), "must be initialized");
  Method* method = data()->speculated_method();
  nmethod* code = method->code();
  address entry;
  // Only target compiled code that is still in use and not being unloaded.
  bool to_compiled = code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    entry = code->entry_point();
  } else {
    entry = method->get_c2i_unverified_entry();
  }

  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": monomorphic to %s: %s",
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         method->print_value_string());

  _call->set_destination_mt_safe(entry);
}
215 
// Upgrades the call site to megamorphic: the call is pointed at a shared
// vtable or itable dispatch stub. May bail out without patching (direct
// call kind, or the needed stub could not be obtained).
void CompiledIC::set_to_megamorphic(CallInfo* call_info) {
  assert(data()->is_initialized(), "must be initialized");

  address entry;
  if (call_info->call_kind() == CallInfo::direct_call) {
    // C1 sometimes compiles a callsite before the target method is loaded, resulting in
    // dynamically bound callsites that should really be statically bound. However, the
    // target method might not have a vtable or itable. We just wait for better code to arrive
    return;
  } else if (call_info->call_kind() == CallInfo::itable_call) {
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index);
    // Stub creation can fail (e.g. no space); leave the IC unchanged then.
    if (entry == nullptr) {
      return;
    }
#ifdef ASSERT
    assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "what else?");
    // Can be different than selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index);
    // Stub creation can fail (e.g. no space); leave the IC unchanged then.
    if (entry == nullptr) {
      return;
    }
  }

  assert(call_info->selected_method() != nullptr, "virtual or interface method must be found");
  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));

  _call->set_destination_mt_safe(entry);
  assert(is_megamorphic(), "sanity check");
}
256 
// Transitions the inline cache after a miss, using the freshly resolved
// call_info and the actual receiver klass:
//   clean -> monomorphic (speculation matches) or megamorphic (it doesn't);
//   megamorphic is terminal and left alone.
void CompiledIC::update(CallInfo* call_info, Klass* receiver_klass) {
  // If this is the first time we fix the inline cache, we ensure it's initialized
  ensure_initialized(call_info, receiver_klass);

  if (is_megamorphic()) {
    // Terminal state for the inline cache
    return;
  }

  if (is_speculated_klass(receiver_klass)) {
    // If the speculated class matches the receiver klass, we can speculate that will
    // continue to be the case with a monomorphic inline cache
    set_to_monomorphic();
  } else {
    // If the dynamic type speculation fails, we try to transform to a megamorphic state
    // for the inline cache using stubs to dispatch in tables
    set_to_megamorphic(call_info);
  }
}
276 
// Clean means the call still targets the runtime resolve stub.
bool CompiledIC::is_clean() const {
  return destination() == SharedRuntime::get_resolve_virtual_call_stub();
}
280 
281 bool CompiledIC::is_monomorphic() const {
282   return !is_clean() && !is_megamorphic();
283 }
284 
// Megamorphic means the call targets one of the shared vtable/itable
// dispatch stubs managed by VtableStubs.
bool CompiledIC::is_megamorphic() const {
  return VtableStubs::entry_point(destination()) != nullptr;
}
288 
// Returns true if receiver_klass is the klass this IC currently speculates
// on (nullptr from data() if the speculated klass was unloaded).
bool CompiledIC::is_speculated_klass(Klass* receiver_klass) {
  return data()->speculated_klass() == receiver_klass;
}
292 
// GC support
// Forwards GC cleaning to the IC data (see CompiledICData::clean_metadata).
void CompiledIC::clean_metadata() {
  data()->clean_metadata();
}
297 
// Forwards metadata iteration to the IC data.
void CompiledIC::metadata_do(MetadataClosure* cl) {
  data()->metadata_do(cl);
}
301 
302 #ifndef PRODUCT
303 void CompiledIC::print() {
304   tty->print("Inline cache at " INTPTR_FORMAT ", calling " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
305              p2i(instruction_address()), p2i(destination()), p2i(data()));
306   tty->cr();
307 }
308 
// Verifies the underlying native call instruction at this call site.
void CompiledIC::verify() {
  _call->verify();
}
312 #endif
313 
314 // ----------------------------------------------------------------------------
315 
// Resets this direct call site to the matching resolve stub, forcing
// re-resolution on the next invocation. The relocation type at the call
// site tells us whether it is a static or an optimized-virtual call.
void CompiledDirectCall::set_to_clean() {
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Reset call site
  RelocIterator iter((nmethod*)nullptr, instruction_address(), instruction_address() + 1);
  while (iter.next()) {
    switch(iter.type()) {
    case relocInfo::static_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
      break;
    case relocInfo::opt_virtual_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_opt_virtual_call_stub());
      break;
    default:
      ShouldNotReachHere();
    }
  }
  assert(is_clean(), "should be clean after cleaning");

  log_debug(inlinecache)("DC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
}
337 
// Binds this direct call to callee_method: to its verified compiled entry
// when usable code exists, otherwise through the c2i adapter into the
// interpreter.
void CompiledDirectCall::set(const methodHandle& callee_method) {
  nmethod* code = callee_method->code();
  nmethod* caller = CodeCache::find_nmethod(instruction_address());
  assert(caller != nullptr, "did not find caller nmethod");

  // Special case: a call from the Continuation.enter intrinsic that
  // ContinuationEntry classifies as an interpreted call must not be bound
  // to compiled code.
  bool to_interp_cont_enter = caller->method()->is_continuation_enter_intrinsic() &&
                              ContinuationEntry::is_interpreted_call(instruction_address());

  // Only target compiled code that is in use and not being unloaded.
  bool to_compiled = !to_interp_cont_enter && code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    _call->set_destination_mt_safe(code->verified_entry_point());
    assert(is_call_to_compiled(), "should be compiled after set to compiled");
  } else {
    // Patch call site to C2I adapter if code is deoptimized or unloaded.
    // We also need to patch the static call stub to set the rmethod register
    // to the callee_method so the c2i adapter knows how to build the frame
    set_to_interpreted(callee_method, callee_method->get_c2i_entry());
    assert(is_call_to_interpreted(), "should be interpreted after set to interpreted");
  }

  log_trace(inlinecache)("DC@" INTPTR_FORMAT ": set to %s: %s: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         callee_method->print_value_string(),
                         p2i(_call->destination()));
}
365 
// Clean means the call targets one of the two resolve stubs (static or
// optimized-virtual).
bool CompiledDirectCall::is_clean() const {
  return destination() == SharedRuntime::get_resolve_static_call_stub() ||
         destination() == SharedRuntime::get_resolve_opt_virtual_call_stub();
}
370 
371 bool CompiledDirectCall::is_call_to_interpreted() const {
372   // It is a call to interpreted, if it calls to a stub. Hence, the destination
373   // must be in the stub part of the nmethod that contains the call
374   nmethod* nm = CodeCache::find_nmethod(instruction_address());
375   assert(nm != nullptr, "did not find nmethod");
376   return nm->stub_contains(destination());
377 }
378 
// A call to compiled code: the destination is an nmethod and is not a stub
// embedded in the caller (that would be a call to the interpreter).
bool CompiledDirectCall::is_call_to_compiled() const {
  nmethod* caller = CodeCache::find_nmethod(instruction_address());
  assert(caller != nullptr, "did not find caller nmethod");
  // NOTE(review): assumes the destination is always inside the code cache
  // (dest_cb != nullptr) — appears to hold for all patched states; confirm.
  CodeBlob* dest_cb = CodeCache::find_blob(destination());
  return !caller->stub_contains(destination()) && dest_cb->is_nmethod();
}
385 
// Finds the static call stub associated with the direct call at the given
// instruction address, or nullptr if no relocation matches that address.
address CompiledDirectCall::find_stub_for(address instruction) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)nullptr, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub();
        default:
          ShouldNotReachHere();
      }
    }
  }
  return nullptr;
}
405 
// Finds the static call stub for this call site.
address CompiledDirectCall::find_stub() {
  return find_stub_for(instruction_address());
}
409 
410 #ifndef PRODUCT
411 void CompiledDirectCall::print() {
412   tty->print("direct call at " INTPTR_FORMAT " to " INTPTR_FORMAT " -> ", p2i(instruction_address()), p2i(destination()));
413   if (is_clean()) {
414     tty->print("clean");
415   } else if (is_call_to_compiled()) {
416     tty->print("compiled");
417   } else if (is_call_to_interpreted()) {
418     tty->print("interpreted");
419   }
420   tty->cr();
421 }
422 
// Debug check that patching this direct call is MT-safe: the previously
// installed method/destination may only differ from the new one for benign
// reasons (never set, same callee, regenerated LambdaForm, class unloading,
// or a race with redefinition of a deoptimized nmethod).
void CompiledDirectCall::verify_mt_safe(const methodHandle& callee, address entry,
                                        NativeMovConstReg* method_holder,
                                        NativeJump* jump) {
  _call->verify();
  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm.  If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == nullptr || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // (address)-1 means the jump destination was never set.
  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry
         || old_method == nullptr || !old_method->method_holder()->is_loader_alive() // may have a race due to class unloading.
         || old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "b) MT-unsafe modification of inline cache");
}
444 #endif