1 /*
  2  * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "code/codeBehaviours.hpp"
 26 #include "code/codeCache.hpp"
 27 #include "code/compiledIC.hpp"
 28 #include "code/nmethod.hpp"
 29 #include "code/vtableStubs.hpp"
 30 #include "memory/resourceArea.hpp"
 31 #include "memory/universe.hpp"
 32 #include "oops/compressedKlass.hpp"
 33 #include "oops/klass.inline.hpp"
 34 #include "oops/method.inline.hpp"
 35 #include "runtime/atomic.hpp"
 36 #include "runtime/continuationEntry.hpp"
 37 #include "runtime/handles.inline.hpp"
 38 #include "runtime/interfaceSupport.inline.hpp"
 39 #include "runtime/sharedRuntime.hpp"
 40 #include "sanitizers/leak.hpp"
 41 
 42 
 43 // Every time a compiled IC is changed or its type is being accessed,
 44 // either the CompiledIC_lock must be set or we must be at a safe point.
 45 
// Acquire inline-cache protection for 'method' via the currently installed
// protection behaviour. Records whether a lock was actually taken so the
// destructor knows whether it must release it.
// NOTE: member-initializer order matters here - _behaviour must be
// initialized before _locked, which is computed from _behaviour->lock().
CompiledICLocker::CompiledICLocker(nmethod* method)
  : _method(method),
    _behaviour(CompiledICProtectionBehaviour::current()),
    _locked(_behaviour->lock(_method)) {
}
 51 
 52 CompiledICLocker::~CompiledICLocker() {
 53   if (_locked) {
 54     _behaviour->unlock(_method);
 55   }
 56 }
 57 
 58 bool CompiledICLocker::is_safe(nmethod* method) {
 59   return CompiledICProtectionBehaviour::current()->is_safe(method);
 60 }
 61 
 62 bool CompiledICLocker::is_safe(address code) {
 63   CodeBlob* cb = CodeCache::find_blob(code);
 64   assert(cb != nullptr && cb->is_nmethod(), "must be compiled");
 65   nmethod* nm = cb->as_nmethod();
 66   return CompiledICProtectionBehaviour::current()->is_safe(nm);
 67 }
 68 
 69 CompiledICData::CompiledICData()
 70   : _speculated_method(),
 71     _speculated_klass(),
 72     _itable_defc_klass(),
 73     _itable_refc_klass(),
 74     _is_initialized() {}
 75 
 76 // Inline cache callsite info is initialized once the first time it is resolved
 77 void CompiledICData::initialize(CallInfo* call_info, Klass* receiver_klass) {
 78   _speculated_method = call_info->selected_method();
 79   if (UseCompressedClassPointers) {
 80     _speculated_klass = (uintptr_t)CompressedKlassPointers::encode_not_null(receiver_klass);
 81   } else {
 82     _speculated_klass = (uintptr_t)receiver_klass;
 83   }
 84   if (call_info->call_kind() == CallInfo::itable_call) {
 85     assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
 86     _itable_defc_klass = call_info->resolved_method()->method_holder();
 87     _itable_refc_klass = call_info->resolved_klass();
 88   }
 89   _is_initialized = true;
 90 }
 91 
 92 bool CompiledICData::is_speculated_klass_unloaded() const {
 93   return is_initialized() && _speculated_klass == 0;
 94 }
 95 
// GC support: if the speculated klass's class loader has died, clear out the
// speculated klass and method so this callsite holds no stale metadata.
void CompiledICData::clean_metadata() {
  // Nothing to do if never resolved, or already cleaned.
  if (!is_initialized() || is_speculated_klass_unloaded()) {
    return;
  }

  // GC cleaning doesn't need to change the state of the inline cache,
  // only nuke stale speculated metadata if it gets unloaded. If the
  // inline cache is monomorphic, the unverified entries will miss, and
  // subsequent miss handlers will upgrade the callsite to megamorphic,
  // which makes sense as it obviously is megamorphic then.
  if (!speculated_klass()->is_loader_alive()) {
    // Atomic stores: other threads may be reading these fields concurrently.
    // Klass is cleared first; readers treat _speculated_klass == 0 as "unloaded".
    Atomic::store(&_speculated_klass, (uintptr_t)0);
    Atomic::store(&_speculated_method, (Method*)nullptr);
  }

  assert(_speculated_method == nullptr || _speculated_method->method_holder()->is_loader_alive(),
         "Speculated method is not unloaded despite class being unloaded");
}
114 
// Apply 'cl' to every piece of metadata this callsite still references:
// the speculated method/klass (unless already cleaned) and, for itable
// calls, the reference and defining klasses.
void CompiledICData::metadata_do(MetadataClosure* cl) {
  if (!is_initialized()) {
    return;
  }

  if (!is_speculated_klass_unloaded()) {
    cl->do_metadata(_speculated_method);
    cl->do_metadata(speculated_klass());
  }
  // The itable klasses are only non-null for itable calls (see initialize()).
  if (_itable_refc_klass != nullptr) {
    cl->do_metadata(_itable_refc_klass);
  }
  if (_itable_defc_klass != nullptr) {
    cl->do_metadata(_itable_defc_klass);
  }
}
131 
132 Klass* CompiledICData::speculated_klass() const {
133   if (is_speculated_klass_unloaded()) {
134     return nullptr;
135   }
136 
137   if (UseCompressedClassPointers) {
138     return CompressedKlassPointers::decode_not_null((narrowKlass)_speculated_klass);
139   } else {
140     return (Klass*)_speculated_klass;
141   }
142 }
143 
144 //-----------------------------------------------------------------------------
145 // High-level access to an inline cache. Guaranteed to be MT-safe.
146 
// Accessor for the out-of-line data record associated with this callsite.
CompiledICData* CompiledIC::data() const {
  return _data;
}
150 
151 CompiledICData* data_from_reloc_iter(RelocIterator* iter) {
152   assert(iter->type() == relocInfo::virtual_call_type, "wrong reloc. info");
153 
154   virtual_call_Relocation* r = iter->virtual_call_reloc();
155   NativeMovConstReg* value = nativeMovConstReg_at(r->cached_value());
156 
157   return (CompiledICData*)value->data();
158 }
159 
// Build a CompiledIC view over the virtual call the relocation iterator is
// currently positioned at. Caller must hold IC protection (asserted below).
CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code()),
    _data(data_from_reloc_iter(iter)),
    _call(nativeCall_at(iter->addr()))
{
  assert(_method != nullptr, "must pass compiled method");
  assert(_method->contains(iter->addr()), "must be in compiled method");
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
}
169 
170 CompiledIC* CompiledIC_before(nmethod* nm, address return_addr) {
171   address call_site = nativeCall_before(return_addr)->instruction_address();
172   return CompiledIC_at(nm, call_site);
173 }
174 
// Construct a CompiledIC for the call instruction starting at 'call_site'.
CompiledIC* CompiledIC_at(nmethod* nm, address call_site) {
  // One-past range so the iterator covers exactly the relocation at call_site.
  RelocIterator iter(nm, call_site, call_site + 1);
  iter.next();
  return CompiledIC_at(&iter);
}
180 
181 CompiledIC* CompiledIC_at(Relocation* call_reloc) {
182   address call_site = call_reloc->addr();
183   nmethod* nm = CodeCache::find_blob(call_reloc->addr())->as_nmethod();
184   return CompiledIC_at(nm, call_site);
185 }
186 
187 CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
188   CompiledIC* c_ic = new CompiledIC(reloc_iter);
189   c_ic->verify();
190   return c_ic;
191 }
192 
// Lazily populate the callsite data on first resolution; subsequent calls
// are no-ops (see CompiledICData::initialize()).
void CompiledIC::ensure_initialized(CallInfo* call_info, Klass* receiver_klass) {
  if (!_data->is_initialized()) {
    _data->initialize(call_info, receiver_klass);
  }
}
198 
// Reset the IC to the clean state by pointing the call back at the resolve
// stub; the next invocation will re-resolve the callsite.
void CompiledIC::set_to_clean() {
  log_debug(inlinecache)("IC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
  _call->set_destination_mt_safe(SharedRuntime::get_resolve_virtual_call_stub());
}
203 
204 void CompiledIC::set_to_monomorphic(bool caller_is_c1) {
205   assert(data()->is_initialized(), "must be initialized");
206   Method* method = data()->speculated_method();
207   nmethod* code = method->code();
208   address entry;
209   bool to_compiled = code != nullptr && code->is_in_use() && !code->is_unloading();
210 
211   if (to_compiled) {
212     entry = caller_is_c1 ? code->inline_entry_point() : code->entry_point();
213   } else {
214     entry = caller_is_c1 ? method->get_c2i_unverified_inline_entry() : method->get_c2i_unverified_entry();
215   }
216 
217   log_trace(inlinecache)("IC@" INTPTR_FORMAT ": monomorphic to %s: %s",
218                          p2i(_call->instruction_address()),
219                          to_compiled ? "compiled" : "interpreter",
220                          method->print_value_string());
221 
222   _call->set_destination_mt_safe(entry);
223 }
224 
// Upgrade the callsite to megamorphic dispatch by pointing it at a shared
// vtable or itable stub. Bails out (leaving the IC unchanged) for direct
// calls and when no suitable stub can be obtained.
void CompiledIC::set_to_megamorphic(CallInfo* call_info, bool caller_is_c1) {
  assert(data()->is_initialized(), "must be initialized");

  address entry;
  if (call_info->call_kind() == CallInfo::direct_call) {
    // C1 sometimes compiles a callsite before the target method is loaded, resulting in
    // dynamically bound callsites that should really be statically bound. However, the
    // target method might not have a vtable or itable. We just wait for better code to arrive
    return;
  } else if (call_info->call_kind() == CallInfo::itable_call) {
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index, caller_is_c1);
    // Stub creation can fail (e.g. code cache exhaustion); leave IC as-is.
    if (entry == nullptr) {
      return;
    }
#ifdef ASSERT
    // Cross-check the precomputed itable index against the resolved method.
    assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "what else?");
    // Can be different than selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index, caller_is_c1);
    // Stub creation can fail (e.g. code cache exhaustion); leave IC as-is.
    if (entry == nullptr) {
      return;
    }
  }

  assert(call_info->selected_method() != nullptr, "virtual or interface method must be found");
  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));

  _call->set_destination_mt_safe(entry);
  assert(is_megamorphic(), "sanity check");
}
265 
// Drive the IC state machine on a resolved call: initialize lazily, stay put
// once megamorphic, speculate monomorphically while the receiver klass
// matches, otherwise upgrade to megamorphic dispatch.
void CompiledIC::update(CallInfo* call_info, Klass* receiver_klass, bool caller_is_c1) {
  // If this is the first time we fix the inline cache, we ensure it's initialized
  ensure_initialized(call_info, receiver_klass);

  if (is_megamorphic()) {
    // Terminal state for the inline cache
    return;
  }

  if (is_speculated_klass(receiver_klass)) {
    // If the speculated class matches the receiver klass, we can speculate that will
    // continue to be the case with a monomorphic inline cache
    set_to_monomorphic(caller_is_c1);
  } else {
    // If the dynamic type speculation fails, we try to transform to a megamorphic state
    // for the inline cache using stubs to dispatch in tables
    set_to_megamorphic(call_info, caller_is_c1);
  }
}
285 
// Clean means the call still targets the resolve stub, i.e. the callsite has
// not been bound to any receiver yet.
bool CompiledIC::is_clean() const {
  return destination() == SharedRuntime::get_resolve_virtual_call_stub();
}
289 
290 bool CompiledIC::is_monomorphic() const {
291   return !is_clean() && !is_megamorphic();
292 }
293 
// Megamorphic means the call currently targets a vtable/itable dispatch stub.
bool CompiledIC::is_megamorphic() const {
  return VtableStubs::entry_point(destination()) != nullptr;
}
297 
// True if 'receiver_klass' matches the klass recorded in the IC data
// (nullptr on both sides compares equal, e.g. after unloading).
bool CompiledIC::is_speculated_klass(Klass* receiver_klass) {
  return data()->speculated_klass() == receiver_klass;
}
301 
302 // GC support
// Delegate GC metadata cleaning to the callsite data record.
void CompiledIC::clean_metadata() {
  data()->clean_metadata();
}
306 
// Delegate metadata iteration to the callsite data record.
void CompiledIC::metadata_do(MetadataClosure* cl) {
  data()->metadata_do(cl);
}
310 
311 #ifndef PRODUCT
// Debug printing: callsite address, current call destination, and the
// address of the associated CompiledICData.
void CompiledIC::print() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
             p2i(instruction_address()), p2i(destination()), p2i(data()));
  tty->cr();
}
317 
// Sanity-check the underlying native call instruction.
void CompiledIC::verify() {
  _call->verify();
}
321 #endif
322 
323 // ----------------------------------------------------------------------------
324 
// Reset a direct call site to its unresolved state by pointing it at the
// matching resolve stub (static vs. optimized-virtual, depending on the
// kind of relocation found at the site).
void CompiledDirectCall::set_to_clean() {
  // in_use is unused but needed to match template function in nmethod
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Reset call site
  RelocIterator iter((nmethod*)nullptr, instruction_address(), instruction_address() + 1);
  while (iter.next()) {
    switch(iter.type()) {
    case relocInfo::static_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
      break;
    case relocInfo::opt_virtual_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_opt_virtual_call_stub());
      break;
    default:
      // Only the two direct-call relocation kinds are expected here.
      ShouldNotReachHere();
    }
  }
  assert(is_clean(), "should be clean after cleaning");

  log_debug(inlinecache)("DC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
}
346 
// Bind this direct call to 'callee_method': target its compiled code when
// usable, otherwise the c2i adapter. Continuation-enter intrinsic callers
// calling an interpreted target are forced onto the interpreted path.
void CompiledDirectCall::set(const methodHandle& callee_method, bool caller_is_c1) {
  nmethod* code = callee_method->code();
  nmethod* caller = CodeCache::find_nmethod(instruction_address());
  assert(caller != nullptr, "did not find caller nmethod");

  // Special case: Continuation.enter must call interpreted here even when
  // compiled code exists for the callee.
  bool to_interp_cont_enter = caller->method()->is_continuation_enter_intrinsic() &&
                              ContinuationEntry::is_interpreted_call(instruction_address());

  bool to_compiled = !to_interp_cont_enter && code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    _call->set_destination_mt_safe(caller_is_c1 ? code->verified_inline_entry_point() : code->verified_entry_point());
    assert(is_call_to_compiled(), "should be compiled after set to compiled");
  } else {
    // Patch call site to C2I adapter if code is deoptimized or unloaded.
    // We also need to patch the static call stub to set the rmethod register
    // to the callee_method so the c2i adapter knows how to build the frame
    set_to_interpreted(callee_method, caller_is_c1 ? callee_method->get_c2i_inline_entry() : callee_method->get_c2i_entry());
    assert(is_call_to_interpreted(), "should be interpreted after set to interpreted");
  }

  log_trace(inlinecache)("DC@" INTPTR_FORMAT ": set to %s: %s: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         callee_method->print_value_string(),
                         p2i(_call->destination()));
}
374 
375 bool CompiledDirectCall::is_clean() const {
376   return destination() == SharedRuntime::get_resolve_static_call_stub() ||
377          destination() == SharedRuntime::get_resolve_opt_virtual_call_stub();
378 }
379 
380 bool CompiledDirectCall::is_call_to_interpreted() const {
381   // It is a call to interpreted, if it calls to a stub. Hence, the destination
382   // must be in the stub part of the nmethod that contains the call
383   nmethod* nm = CodeCache::find_nmethod(instruction_address());
384   assert(nm != nullptr, "did not find nmethod");
385   return nm->stub_contains(destination());
386 }
387 
388 bool CompiledDirectCall::is_call_to_compiled() const {
389   nmethod* caller = CodeCache::find_nmethod(instruction_address());
390   assert(caller != nullptr, "did not find caller nmethod");
391   CodeBlob* dest_cb = CodeCache::find_blob(destination());
392   return !caller->stub_contains(destination()) && dest_cb->is_nmethod();
393 }
394 
// Locate the static call stub associated with the direct call at
// 'instruction', by walking the relocations covering that address.
// Returns nullptr if no matching relocation is found.
address CompiledDirectCall::find_stub_for(address instruction) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)nullptr, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub();
        default:
          ShouldNotReachHere();
      }
    }
  }
  return nullptr;
}
414 
// Convenience wrapper: find the static call stub for this call site.
address CompiledDirectCall::find_stub() {
  return find_stub_for(instruction_address());
}
418 
419 #ifndef PRODUCT
// Debug printing: call site, destination, and the classified state of the
// call (clean / compiled / interpreted).
void CompiledDirectCall::print() {
  tty->print("direct call at " INTPTR_FORMAT " to " INTPTR_FORMAT " -> ", p2i(instruction_address()), p2i(destination()));
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}
431 
// Assert that patching this call site to ('callee', 'entry') does not race
// with another modification. Several benign races are tolerated: deleted
// LambdaForm caches, class unloading, and redefinition (old methods).
void CompiledDirectCall::verify_mt_safe(const methodHandle& callee, address entry,
                                        NativeMovConstReg* method_holder,
                                        NativeJump* jump) {
  _call->verify();
  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm.  If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == nullptr || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // The stub's jump must either be unset (-1), already point at the new
  // entry, or be explained by one of the races tolerated above.
  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry
         || old_method == nullptr || !old_method->method_holder()->is_loader_alive() // may have a race due to class unloading.
         || old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "b) MT-unsafe modification of inline cache");
}
453 #endif