1 /*
  2  * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "code/codeBehaviours.hpp"
 27 #include "code/codeCache.hpp"
 28 #include "code/compiledIC.hpp"
 29 #include "code/nmethod.hpp"
 30 #include "code/vtableStubs.hpp"
 31 #include "memory/resourceArea.hpp"
 32 #include "memory/universe.hpp"
 33 #include "oops/compressedKlass.hpp"
 34 #include "oops/klass.inline.hpp"
 35 #include "oops/method.inline.hpp"
 36 #include "runtime/atomic.hpp"
 37 #include "runtime/continuationEntry.hpp"
 38 #include "runtime/handles.inline.hpp"
 39 #include "runtime/interfaceSupport.inline.hpp"
 40 #include "runtime/sharedRuntime.hpp"
 41 #include "sanitizers/leak.hpp"
 42 
 43 
 44 // Every time a compiled IC is changed or its type is being accessed,
 45 // either the CompiledIC_lock must be set or we must be at a safe point.
 46 
// Acquires inline-cache patching protection for the given nmethod via the
// current protection behaviour. _locked records whether the lock was actually
// taken (the behaviour may report the thread is already safe), so the
// destructor only unlocks when needed.
CompiledICLocker::CompiledICLocker(nmethod* method)
  : _method(method),
    _behaviour(CompiledICProtectionBehaviour::current()),
    _locked(_behaviour->lock(_method)) {
}
 52 
 53 CompiledICLocker::~CompiledICLocker() {
 54   if (_locked) {
 55     _behaviour->unlock(_method);
 56   }
 57 }
 58 
 59 bool CompiledICLocker::is_safe(nmethod* method) {
 60   return CompiledICProtectionBehaviour::current()->is_safe(method);
 61 }
 62 
 63 bool CompiledICLocker::is_safe(address code) {
 64   CodeBlob* cb = CodeCache::find_blob(code);
 65   assert(cb != nullptr && cb->is_nmethod(), "must be compiled");
 66   nmethod* nm = cb->as_nmethod();
 67   return CompiledICProtectionBehaviour::current()->is_safe(nm);
 68 }
 69 
 70 CompiledICData::CompiledICData()
 71   : _speculated_method(),
 72     _speculated_klass(),
 73     _itable_defc_klass(),
 74     _itable_refc_klass(),
 75     _is_initialized() {}
 76 
 77 // Inline cache callsite info is initialized once the first time it is resolved
 78 void CompiledICData::initialize(CallInfo* call_info, Klass* receiver_klass) {
 79   _speculated_method = call_info->selected_method();
 80   if (UseCompressedClassPointers) {
 81     _speculated_klass = (uintptr_t)CompressedKlassPointers::encode_not_null(receiver_klass);
 82   } else {
 83     _speculated_klass = (uintptr_t)receiver_klass;
 84   }
 85   if (call_info->call_kind() == CallInfo::itable_call) {
 86     assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
 87     _itable_defc_klass = call_info->resolved_method()->method_holder();
 88     _itable_refc_klass = call_info->resolved_klass();
 89   }
 90   _is_initialized = true;
 91 }
 92 
 93 bool CompiledICData::is_speculated_klass_unloaded() const {
 94   return is_initialized() && _speculated_klass == 0;
 95 }
 96 
// Called during class unloading: clears the speculated method/klass when the
// speculated klass's class loader is no longer alive.
void CompiledICData::clean_metadata() {
  // Nothing to do if the callsite was never resolved or was already nuked.
  if (!is_initialized() || is_speculated_klass_unloaded()) {
    return;
  }

  // GC cleaning doesn't need to change the state of the inline cache,
  // only nuke stale speculated metadata if it gets unloaded. If the
  // inline cache is monomorphic, the unverified entries will miss, and
  // subsequent miss handlers will upgrade the callsite to megamorphic,
  // which makes sense as it obviously is megamorphic then.
  if (!speculated_klass()->is_loader_alive()) {
    // Atomic stores: readers may be racing with this cleaning.
    Atomic::store(&_speculated_klass, (uintptr_t)0);
    Atomic::store(&_speculated_method, (Method*)nullptr);
  }

  assert(_speculated_method == nullptr || _speculated_method->method_holder()->is_loader_alive(),
         "Speculated method is not unloaded despite class being unloaded");
}
115 
116 void CompiledICData::metadata_do(MetadataClosure* cl) {
117   if (!is_initialized()) {
118     return;
119   }
120 
121   if (!is_speculated_klass_unloaded()) {
122     cl->do_metadata(_speculated_method);
123     cl->do_metadata(speculated_klass());
124   }
125   if (_itable_refc_klass != nullptr) {
126     cl->do_metadata(_itable_refc_klass);
127   }
128   if (_itable_defc_klass != nullptr) {
129     cl->do_metadata(_itable_defc_klass);
130   }
131 }
132 
133 Klass* CompiledICData::speculated_klass() const {
134   if (is_speculated_klass_unloaded()) {
135     return nullptr;
136   }
137 
138   if (UseCompressedClassPointers) {
139     return CompressedKlassPointers::decode_not_null((narrowKlass)_speculated_klass);
140   } else {
141     return (Klass*)_speculated_klass;
142   }
143 }
144 
145 //-----------------------------------------------------------------------------
146 // High-level access to an inline cache. Guaranteed to be MT-safe.
147 
// Accessor for the out-of-line inline cache data associated with this callsite.
CompiledICData* CompiledIC::data() const {
  return _data;
}
151 
152 CompiledICData* data_from_reloc_iter(RelocIterator* iter) {
153   assert(iter->type() == relocInfo::virtual_call_type, "wrong reloc. info");
154 
155   virtual_call_Relocation* r = iter->virtual_call_reloc();
156   NativeMovConstReg* value = nativeMovConstReg_at(r->cached_value());
157 
158   return (CompiledICData*)value->data();
159 }
160 
// Construct a CompiledIC view over the virtual-call site the iterator is
// positioned at. Requires IC protection (see CompiledICLocker) to be held.
CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code()),
    _data(data_from_reloc_iter(iter)),
    _call(nativeCall_at(iter->addr()))
{
  assert(_method != nullptr, "must pass compiled method");
  assert(_method->contains(iter->addr()), "must be in compiled method");
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
}
170 
171 CompiledIC* CompiledIC_before(nmethod* nm, address return_addr) {
172   address call_site = nativeCall_before(return_addr)->instruction_address();
173   return CompiledIC_at(nm, call_site);
174 }
175 
176 CompiledIC* CompiledIC_at(nmethod* nm, address call_site) {
177   RelocIterator iter(nm, call_site, call_site + 1);
178   iter.next();
179   return CompiledIC_at(&iter);
180 }
181 
182 CompiledIC* CompiledIC_at(Relocation* call_reloc) {
183   address call_site = call_reloc->addr();
184   nmethod* nm = CodeCache::find_blob(call_reloc->addr())->as_nmethod();
185   return CompiledIC_at(nm, call_site);
186 }
187 
188 CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
189   CompiledIC* c_ic = new CompiledIC(reloc_iter);
190   c_ic->verify();
191   return c_ic;
192 }
193 
194 void CompiledIC::ensure_initialized(CallInfo* call_info, Klass* receiver_klass) {
195   if (!_data->is_initialized()) {
196     _data->initialize(call_info, receiver_klass);
197   }
198 }
199 
// Reset the callsite to the runtime resolve stub, forcing re-resolution on
// the next invocation. Patching is done MT-safely.
void CompiledIC::set_to_clean() {
  log_debug(inlinecache)("IC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
  _call->set_destination_mt_safe(SharedRuntime::get_resolve_virtual_call_stub());
}
204 
// Point the callsite directly at the speculated method: at its compiled
// (unverified) entry if usable compiled code exists, otherwise at the
// c2i adapter's unverified entry so the interpreter handles the call.
void CompiledIC::set_to_monomorphic(bool caller_is_c1) {
  assert(data()->is_initialized(), "must be initialized");
  Method* method = data()->speculated_method();
  nmethod* code = method->code();
  address entry;
  // Only dispatch to compiled code that is in use and not being unloaded.
  bool to_compiled = code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    // C1 callers use the inline entry point variant.
    entry = caller_is_c1 ? code->inline_entry_point() : code->entry_point();
  } else {
    entry = caller_is_c1 ? method->get_c2i_unverified_inline_entry() : method->get_c2i_unverified_entry();
  }

  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": monomorphic to %s: %s",
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         method->print_value_string());

  _call->set_destination_mt_safe(entry);
}
225 
// Upgrade the callsite to megamorphic dispatch through a vtable or itable
// stub. May silently do nothing (leaving the IC unchanged) if the call turns
// out to be direct or if no stub can be produced yet.
void CompiledIC::set_to_megamorphic(CallInfo* call_info, bool caller_is_c1) {
  assert(data()->is_initialized(), "must be initialized");

  address entry;
  if (call_info->call_kind() == CallInfo::direct_call) {
    // C1 sometimes compiles a callsite before the target method is loaded, resulting in
    // dynamically bound callsites that should really be statically bound. However, the
    // target method might not have a vtable or itable. We just wait for better code to arrive
    return;
  } else if (call_info->call_kind() == CallInfo::itable_call) {
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index, caller_is_c1);
    // Stub creation can fail (e.g. code cache pressure); keep current state.
    if (entry == nullptr) {
      return;
    }
#ifdef ASSERT
    // Cross-check the pre-computed itable index against the resolved method.
    assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "what else?");
    // Can be different than selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index, caller_is_c1);
    // As above: stub lookup/creation may fail; leave the IC as-is.
    if (entry == nullptr) {
      return;
    }
  }

  assert(call_info->selected_method() != nullptr, "virtual or interface method must be found");
  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));

  _call->set_destination_mt_safe(entry);
  assert(is_megamorphic(), "sanity check");
}
266 
// Main IC state transition, invoked on an inline cache miss: clean ->
// monomorphic -> megamorphic. Megamorphic is terminal; a monomorphic IC
// whose speculation still holds is simply re-armed.
void CompiledIC::update(CallInfo* call_info, Klass* receiver_klass, bool caller_is_c1) {
  // If this is the first time we fix the inline cache, we ensure it's initialized
  ensure_initialized(call_info, receiver_klass);

  if (is_megamorphic()) {
    // Terminal state for the inline cache
    return;
  }

  if (is_speculated_klass(receiver_klass)) {
    // If the speculated class matches the receiver klass, we can speculate that will
    // continue to be the case with a monomorphic inline cache
    set_to_monomorphic(caller_is_c1);
  } else {
    // If the dynamic type speculation fails, we try to transform to a megamorphic state
    // for the inline cache using stubs to dispatch in tables
    set_to_megamorphic(call_info, caller_is_c1);
  }
}
286 
287 bool CompiledIC::is_clean() const {
288   return destination() == SharedRuntime::get_resolve_virtual_call_stub();
289 }
290 
291 bool CompiledIC::is_monomorphic() const {
292   return !is_clean() && !is_megamorphic();
293 }
294 
295 bool CompiledIC::is_megamorphic() const {
296   return VtableStubs::entry_point(destination()) != nullptr;
297 }
298 
299 bool CompiledIC::is_speculated_klass(Klass* receiver_klass) {
300   return data()->speculated_klass() == receiver_klass;
301 }
302 
// GC support
// Delegates unloading-time cleaning of speculated metadata to the IC data.
void CompiledIC::clean_metadata() {
  data()->clean_metadata();
}
307 
// Applies the closure to all metadata referenced by this inline cache.
void CompiledIC::metadata_do(MetadataClosure* cl) {
  data()->metadata_do(cl);
}
311 
312 #ifndef PRODUCT
// Debug-only printing of the IC site, its current target, and its data pointer.
void CompiledIC::print() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
             p2i(instruction_address()), p2i(destination()), p2i(data()));
  tty->cr();
}
318 
// Sanity-checks the underlying native call instruction.
void CompiledIC::verify() {
  _call->verify();
}
322 #endif
323 
324 // ----------------------------------------------------------------------------
325 
// Reset a direct call site to the appropriate resolve stub, forcing
// re-resolution. The relocation type distinguishes static call sites from
// optimized-virtual ones.
void CompiledDirectCall::set_to_clean() {
  // NOTE(review): stale comment? No in_use parameter exists here — confirm.
  // in_use is unused but needed to match template function in nmethod
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Reset call site
  // A null nmethod makes the iterator look up the containing blob itself.
  RelocIterator iter((nmethod*)nullptr, instruction_address(), instruction_address() + 1);
  while (iter.next()) {
    switch(iter.type()) {
    case relocInfo::static_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
      break;
    case relocInfo::opt_virtual_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_opt_virtual_call_stub());
      break;
    default:
      // Only the two direct-call relocation types are expected at this site.
      ShouldNotReachHere();
    }
  }
  assert(is_clean(), "should be clean after cleaning");

  log_debug(inlinecache)("DC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
}
347 
// Bind this direct call to the callee: either straight to its compiled
// verified entry point, or through the static call stub / c2i adapter into
// the interpreter when no usable compiled code exists.
void CompiledDirectCall::set(const methodHandle& callee_method, bool caller_is_c1) {
  nmethod* code = callee_method->code();
  nmethod* caller = CodeCache::find_nmethod(instruction_address());
  assert(caller != nullptr, "did not find caller nmethod");

  // The continuation-enter intrinsic has a dedicated call site that must go
  // through the interpreter when the continuation entry is interpreted.
  bool to_interp_cont_enter = caller->method()->is_continuation_enter_intrinsic() &&
                              ContinuationEntry::is_interpreted_call(instruction_address());

  // Dispatch to compiled code only if available, in use, and not unloading.
  bool to_compiled = !to_interp_cont_enter && code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    _call->set_destination_mt_safe(caller_is_c1 ? code->verified_inline_entry_point() : code->verified_entry_point());
    assert(is_call_to_compiled(), "should be compiled after set to compiled");
  } else {
    // Patch call site to C2I adapter if code is deoptimized or unloaded.
    // We also need to patch the static call stub to set the rmethod register
    // to the callee_method so the c2i adapter knows how to build the frame
    set_to_interpreted(callee_method, caller_is_c1 ? callee_method->get_c2i_inline_entry() : callee_method->get_c2i_entry());
    assert(is_call_to_interpreted(), "should be interpreted after set to interpreted");
  }

  log_trace(inlinecache)("DC@" INTPTR_FORMAT ": set to %s: %s: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         callee_method->print_value_string(),
                         p2i(_call->destination()));
}
375 
376 bool CompiledDirectCall::is_clean() const {
377   return destination() == SharedRuntime::get_resolve_static_call_stub() ||
378          destination() == SharedRuntime::get_resolve_opt_virtual_call_stub();
379 }
380 
381 bool CompiledDirectCall::is_call_to_interpreted() const {
382   // It is a call to interpreted, if it calls to a stub. Hence, the destination
383   // must be in the stub part of the nmethod that contains the call
384   nmethod* nm = CodeCache::find_nmethod(instruction_address());
385   assert(nm != nullptr, "did not find nmethod");
386   return nm->stub_contains(destination());
387 }
388 
389 bool CompiledDirectCall::is_call_to_compiled() const {
390   nmethod* caller = CodeCache::find_nmethod(instruction_address());
391   assert(caller != nullptr, "did not find caller nmethod");
392   CodeBlob* dest_cb = CodeCache::find_blob(destination());
393   return !caller->stub_contains(destination()) && dest_cb->is_nmethod();
394 }
395 
// Find the static call stub associated with the direct call at the given
// instruction address, or nullptr if no matching relocation is found.
address CompiledDirectCall::find_stub_for(address instruction) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)nullptr, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub();
        default:
          // Direct calls only ever carry the two relocation types above.
          ShouldNotReachHere();
      }
    }
  }
  return nullptr;
}
415 
// Convenience wrapper: locate the static call stub for this call site.
address CompiledDirectCall::find_stub() {
  return find_stub_for(instruction_address());
}
419 
420 #ifndef PRODUCT
// Debug-only printing of the call site, its target, and the target's kind.
void CompiledDirectCall::print() {
  tty->print("direct call at " INTPTR_FORMAT " to " INTPTR_FORMAT " -> ", p2i(instruction_address()), p2i(destination()));
  if (is_clean()) {
    tty->print("clean");
  } else if (is_call_to_compiled()) {
    tty->print("compiled");
  } else if (is_call_to_interpreted()) {
    tty->print("interpreted");
  }
  tty->cr();
}
432 
// Debug-only check that concurrent patching of this call site is consistent:
// the previously installed method/jump target must either match what we are
// installing, or be explainable by a benign race (lambda form regeneration,
// class unloading, or method redefinition).
void CompiledDirectCall::verify_mt_safe(const methodHandle& callee, address entry,
                                        NativeMovConstReg* method_holder,
                                        NativeJump* jump) {
  _call->verify();
  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm.  If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == nullptr || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // (address)-1 marks a jump destination that has not been set yet.
  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry
         || old_method == nullptr || !old_method->method_holder()->is_loader_alive() // may have a race due to class unloading.
         || old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "b) MT-unsafe modification of inline cache");
}
454 #endif