1 /*
  2  * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "precompiled.hpp"
 26 #include "code/codeBehaviours.hpp"
 27 #include "code/codeCache.hpp"
 28 #include "code/compiledIC.hpp"
 29 #include "code/nmethod.hpp"
 30 #include "code/vtableStubs.hpp"
 31 #include "memory/resourceArea.hpp"
 32 #include "memory/universe.hpp"
 33 #include "oops/compressedKlass.hpp"
 34 #include "oops/klass.inline.hpp"
 35 #include "oops/method.inline.hpp"
 36 #include "runtime/atomic.hpp"
 37 #include "runtime/continuationEntry.hpp"
 38 #include "runtime/handles.inline.hpp"
 39 #include "runtime/interfaceSupport.inline.hpp"
 40 #include "runtime/sharedRuntime.hpp"
 41 #include "sanitizers/leak.hpp"
 42 
 43 
 44 // Every time a compiled IC is changed or its type is being accessed,
 45 // either the CompiledIC_lock must be set or we must be at a safe point.
 46 
// RAII guard for inspecting or mutating the inline caches of 'method'.
// The actual protection strategy (lock vs. safepoint) is delegated to the
// current CompiledICProtectionBehaviour; _locked records whether this
// instance acquired the lock and therefore must release it on destruction.
CompiledICLocker::CompiledICLocker(CompiledMethod* method)
  : _method(method),
    _behaviour(CompiledICProtectionBehaviour::current()),
    _locked(_behaviour->lock(_method)) {
}
 52 
 53 CompiledICLocker::~CompiledICLocker() {
 54   if (_locked) {
 55     _behaviour->unlock(_method);
 56   }
 57 }
 58 
 59 bool CompiledICLocker::is_safe(CompiledMethod* method) {
 60   return CompiledICProtectionBehaviour::current()->is_safe(method);
 61 }
 62 
 63 bool CompiledICLocker::is_safe(address code) {
 64   CodeBlob* cb = CodeCache::find_blob(code);
 65   assert(cb != nullptr && cb->is_compiled(), "must be compiled");
 66   CompiledMethod* cm = cb->as_compiled_method();
 67   return CompiledICProtectionBehaviour::current()->is_safe(cm);
 68 }
 69 
// Value-initialize all fields (null/zero/false); the callsite info is
// populated lazily by initialize() on the first resolution.
CompiledICData::CompiledICData()
  : _speculated_method(),
    _speculated_klass(),
    _itable_defc_klass(),
    _itable_refc_klass(),
    _is_initialized() {}
 76 
// Inline cache callsite info is initialized once the first time it is resolved
void CompiledICData::initialize(CallInfo* call_info, Klass* receiver_klass) {
  _speculated_method = call_info->selected_method();
  // Record the receiver klass observed at first resolution; it is stored
  // encoded when compressed class pointers are in use so it fits the
  // uintptr_t slot either way (see speculated_klass() for decoding).
  if (UseCompressedClassPointers) {
    _speculated_klass = (uintptr_t)CompressedKlassPointers::encode_not_null(receiver_klass);
  } else {
    _speculated_klass = (uintptr_t)receiver_klass;
  }
  if (call_info->call_kind() == CallInfo::itable_call) {
    // Interface calls additionally remember the declaring (defc) and
    // referenced (refc) klasses; both are visited by metadata_do().
    _itable_defc_klass = call_info->resolved_method()->method_holder();
    _itable_refc_klass = call_info->resolved_klass();
  }
  // Set the initialized flag after the fields. NOTE(review): these are plain
  // stores; publication ordering presumably relies on the CompiledICLocker /
  // safepoint protocol - confirm.
  _is_initialized = true;
}
 91 
 92 bool CompiledICData::is_speculated_klass_unloaded() const {
 93   return is_initialized() && _speculated_klass == 0;
 94 }
 95 
// Called during class unloading: drop the speculated method/klass pair if
// its class loader is no longer alive. Leaves the callsite state untouched.
void CompiledICData::clean_metadata() {
  if (!is_initialized() || is_speculated_klass_unloaded()) {
    return;
  }

  // GC cleaning doesn't need to change the state of the inline cache,
  // only nuke stale speculated metadata if it gets unloaded. If the
  // inline cache is monomorphic, the unverified entries will miss, and
  // subsequent miss handlers will upgrade the callsite to megamorphic,
  // which makes sense as it obviously is megamorphic then.
  if (!speculated_klass()->is_loader_alive()) {
    // Word-atomic stores so concurrent readers never observe a torn value.
    Atomic::store(&_speculated_klass, (uintptr_t)0);
    Atomic::store(&_speculated_method, (Method*)nullptr);
  }

  assert(_speculated_method == nullptr || _speculated_method->method_holder()->is_loader_alive(),
         "Speculated method is not unloaded despite class being unloaded");
}
114 
// Apply 'cl' to every piece of metadata this callsite references, skipping
// anything not yet set or already cleared by unloading.
void CompiledICData::metadata_do(MetadataClosure* cl) {
  if (!is_initialized()) {
    return;
  }

  // The speculated method/klass pair is skipped if it was cleared by
  // clean_metadata() after its loader died.
  if (!is_speculated_klass_unloaded()) {
    cl->do_metadata(_speculated_method);
    cl->do_metadata(speculated_klass());
  }
  // The itable klasses are only non-null for itable calls (see initialize()).
  if (_itable_refc_klass != nullptr) {
    cl->do_metadata(_itable_refc_klass);
  }
  if (_itable_defc_klass != nullptr) {
    cl->do_metadata(_itable_defc_klass);
  }
}
131 
132 Klass* CompiledICData::speculated_klass() const {
133   if (is_speculated_klass_unloaded()) {
134     return nullptr;
135   }
136 
137   if (UseCompressedClassPointers) {
138     return CompressedKlassPointers::decode_not_null((narrowKlass)_speculated_klass);
139   } else {
140     return (Klass*)_speculated_klass;
141   }
142 }
143 
144 //-----------------------------------------------------------------------------
145 // High-level access to an inline cache. Guaranteed to be MT-safe.
146 
// Accessor for the out-of-line data blob holding this callsite's speculation
// state (located via relocation info in the constructor).
CompiledICData* CompiledIC::data() const {
  return _data;
}
150 
151 CompiledICData* data_from_reloc_iter(RelocIterator* iter) {
152   assert(iter->type() == relocInfo::virtual_call_type, "wrong reloc. info");
153 
154   virtual_call_Relocation* r = iter->virtual_call_reloc();
155   NativeMovConstReg* value = nativeMovConstReg_at(r->cached_value());
156 
157   return (CompiledICData*)value->data();
158 }
159 
// Construct a CompiledIC view of the virtual callsite the relocation
// iterator is positioned at. Must be called under IC protection
// (CompiledICLocker) as asserted below.
CompiledIC::CompiledIC(RelocIterator* iter)
  : _method(iter->code()),
    _data(data_from_reloc_iter(iter)),
    _call(nativeCall_at(iter->addr()))
{
  assert(_method != nullptr, "must pass compiled method");
  assert(_method->contains(iter->addr()), "must be in compiled method");
  assert(CompiledICLocker::is_safe(_method), "mt unsafe call");
}
169 
170 CompiledIC* CompiledIC_before(CompiledMethod* nm, address return_addr) {
171   address call_site = nativeCall_before(return_addr)->instruction_address();
172   return CompiledIC_at(nm, call_site);
173 }
174 
175 CompiledIC* CompiledIC_at(CompiledMethod* nm, address call_site) {
176   RelocIterator iter(nm, call_site, call_site + 1);
177   iter.next();
178   return CompiledIC_at(&iter);
179 }
180 
181 CompiledIC* CompiledIC_at(Relocation* call_reloc) {
182   address call_site = call_reloc->addr();
183   CompiledMethod* cm = CodeCache::find_blob(call_reloc->addr())->as_compiled_method();
184   return CompiledIC_at(cm, call_site);
185 }
186 
187 CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
188   CompiledIC* c_ic = new CompiledIC(reloc_iter);
189   c_ic->verify();
190   return c_ic;
191 }
192 
193 void CompiledIC::ensure_initialized(CallInfo* call_info, Klass* receiver_klass) {
194   if (!_data->is_initialized()) {
195     _data->initialize(call_info, receiver_klass);
196   }
197 }
198 
// Reset the callsite to the shared resolve stub; the next execution of the
// call will go through resolution again.
void CompiledIC::set_to_clean() {
  log_debug(inlinecache)("IC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
  _call->set_destination_mt_safe(SharedRuntime::get_resolve_virtual_call_stub());
}
203 
// Patch the callsite to call the speculated method directly: its compiled
// entry point if usable code exists, otherwise the interpreter via the
// method's c2i unverified entry.
void CompiledIC::set_to_monomorphic() {
  assert(data()->is_initialized(), "must be initialized");
  Method* method = data()->speculated_method();
  CompiledMethod* code = method->code();
  address entry;
  // Only call into compiled code that is in use and not being unloaded.
  bool to_compiled = code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    entry = code->entry_point();
  } else {
    entry = method->get_c2i_unverified_entry();
  }

  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": monomorphic to %s: %s",
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         method->print_value_string());

  // MT-safe patch: concurrent executions see either the old or new target.
  _call->set_destination_mt_safe(entry);
}
224 
// Upgrade the callsite to megamorphic dispatch through a vtable or itable
// stub. May return without changing state if no stub can be obtained, or
// for direct calls (see comment below).
void CompiledIC::set_to_megamorphic(CallInfo* call_info) {
  assert(data()->is_initialized(), "must be initialized");

  address entry;
  if (call_info->call_kind() == CallInfo::direct_call) {
    // C1 sometimes compiles a callsite before the target method is loaded, resulting in
    // dynamically bound callsites that should really be statically bound. However, the
    // target method might not have a vtable or itable. We just wait for better code to arrive
    return;
  } else if (call_info->call_kind() == CallInfo::itable_call) {
    int itable_index = call_info->itable_index();
    entry = VtableStubs::find_itable_stub(itable_index);
    if (entry == nullptr) {
      // No stub available; leave the IC in its current state.
      return;
    }
#ifdef ASSERT
    int index = call_info->resolved_method()->itable_index();
    assert(index == itable_index, "CallInfo pre-computes this");
    InstanceKlass* k = call_info->resolved_method()->method_holder();
    assert(k->verify_itable_index(itable_index), "sanity check");
#endif //ASSERT
  } else {
    assert(call_info->call_kind() == CallInfo::vtable_call, "what else?");
    // Can be different than selected_method->vtable_index(), due to package-private etc.
    int vtable_index = call_info->vtable_index();
    assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
    entry = VtableStubs::find_vtable_stub(vtable_index);
    if (entry == nullptr) {
      return;
    }
  }

  log_trace(inlinecache)("IC@" INTPTR_FORMAT ": to megamorphic %s entry: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()), call_info->selected_method()->print_value_string(), p2i(entry));

  // MT-safe patch to the table-dispatch stub; this is the IC's terminal state
  // (see update()).
  _call->set_destination_mt_safe(entry);
  assert(is_megamorphic(), "sanity check");
}
263 
// Advance the inline cache state machine for a resolved call with the
// observed receiver klass: initialize on first use, then move towards
// monomorphic or megamorphic. Megamorphic is terminal.
void CompiledIC::update(CallInfo* call_info, Klass* receiver_klass) {
  // If this is the first time we fix the inline cache, we ensure it's initialized
  ensure_initialized(call_info, receiver_klass);

  if (is_megamorphic()) {
    // Terminal state for the inline cache
    return;
  }

  if (is_speculated_klass(receiver_klass)) {
    // If the speculated class matches the receiver klass, we can speculate that will
    // continue to be the case with a monomorphic inline cache
    set_to_monomorphic();
  } else {
    // If the dynamic type speculation fails, we try to transform to a megamorphic state
    // for the inline cache using stubs to dispatch in tables
    set_to_megamorphic(call_info);
  }
}
283 
// Clean == still targeting the shared resolve stub (see set_to_clean()).
bool CompiledIC::is_clean() const {
  return destination() == SharedRuntime::get_resolve_virtual_call_stub();
}
287 
288 bool CompiledIC::is_monomorphic() const {
289   return !is_clean() && !is_megamorphic();
290 }
291 
292 bool CompiledIC::is_megamorphic() const {
293   return VtableStubs::entry_point(destination()) != nullptr;;
294 }
295 
// True if 'receiver_klass' matches the klass speculated at the first
// resolution (nullptr if the speculated klass was unloaded, which never
// matches a live receiver).
bool CompiledIC::is_speculated_klass(Klass* receiver_klass) {
  return data()->speculated_klass() == receiver_klass;
}
299 
// GC support
// Delegate metadata cleaning to the callsite's data blob.
void CompiledIC::clean_metadata() {
  data()->clean_metadata();
}
304 
// Delegate metadata iteration to the callsite's data blob.
void CompiledIC::metadata_do(MetadataClosure* cl) {
  data()->metadata_do(cl);
}
308 
309 #ifndef PRODUCT
310 void CompiledIC::print() {
311   tty->print("Inline cache at " INTPTR_FORMAT ", calling " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
312              p2i(instruction_address()), p2i(destination()), p2i(data()));
313   tty->cr();
314 }
315 
// Verify the underlying native call instruction is well-formed.
void CompiledIC::verify() {
  _call->verify();
}
319 #endif
320 
321 // ----------------------------------------------------------------------------
322 
// Reset a direct call back to its shared resolve stub. The relocation type
// at the callsite determines which resolve stub (static vs. opt-virtual).
void CompiledDirectCall::set_to_clean() {
  // in_use is unused but needed to match template function in CompiledMethod
  // NOTE(review): no 'in_use' parameter is visible in this signature - the
  // comment above may be stale; confirm against CompiledMethod.
  assert(CompiledICLocker::is_safe(instruction_address()), "mt unsafe call");
  // Reset call site
  // Iterate the relocation entries covering exactly this call instruction.
  RelocIterator iter((nmethod*)nullptr, instruction_address(), instruction_address() + 1);
  while (iter.next()) {
    switch(iter.type()) {
    case relocInfo::static_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_static_call_stub());
      break;
    case relocInfo::opt_virtual_call_type:
      _call->set_destination_mt_safe(SharedRuntime::get_resolve_opt_virtual_call_stub());
      break;
    default:
      ShouldNotReachHere();
    }
  }
  assert(is_clean(), "should be clean after cleaning");

  log_debug(inlinecache)("DC@" INTPTR_FORMAT ": set to clean", p2i(_call->instruction_address()));
}
344 
// Bind this direct call to 'callee_method': to its compiled verified entry
// when usable code exists, otherwise to the interpreter via the c2i adapter.
void CompiledDirectCall::set(const methodHandle& callee_method) {
  CompiledMethod* code = callee_method->code();
  CompiledMethod* caller = CodeCache::find_compiled(instruction_address());

  // Calls from the Continuation.enter intrinsic that are flagged as
  // interpreted calls force the interpreted path below.
  bool to_interp_cont_enter = caller->method()->is_continuation_enter_intrinsic() &&
                              ContinuationEntry::is_interpreted_call(instruction_address());

  // Only target compiled code that is in use and not being unloaded.
  bool to_compiled = !to_interp_cont_enter && code != nullptr && code->is_in_use() && !code->is_unloading();

  if (to_compiled) {
    _call->set_destination_mt_safe(code->verified_entry_point());
    assert(is_call_to_compiled(), "should be compiled after set to compiled");
  } else {
    // Patch call site to C2I adapter if code is deoptimized or unloaded.
    // We also need to patch the static call stub to set the rmethod register
    // to the callee_method so the c2i adapter knows how to build the frame
    set_to_interpreted(callee_method, callee_method->get_c2i_entry());
    assert(is_call_to_interpreted(), "should be interpreted after set to interpreted");
  }

  log_trace(inlinecache)("DC@" INTPTR_FORMAT ": set to %s: %s: " INTPTR_FORMAT,
                         p2i(_call->instruction_address()),
                         to_compiled ? "compiled" : "interpreter",
                         callee_method->print_value_string(),
                         p2i(_call->destination()));
}
371 
372 bool CompiledDirectCall::is_clean() const {
373   return destination() == SharedRuntime::get_resolve_static_call_stub() ||
374          destination() == SharedRuntime::get_resolve_opt_virtual_call_stub();
375 }
376 
377 bool CompiledDirectCall::is_call_to_interpreted() const {
378   // It is a call to interpreted, if it calls to a stub. Hence, the destination
379   // must be in the stub part of the nmethod that contains the call
380   CompiledMethod* cm = CodeCache::find_compiled(instruction_address());
381   return cm->stub_contains(destination());
382 }
383 
384 bool CompiledDirectCall::is_call_to_compiled() const {
385   CompiledMethod* caller = CodeCache::find_compiled(instruction_address());
386   CodeBlob* dest_cb = CodeCache::find_blob(destination());
387   return !caller->stub_contains(destination()) && dest_cb->is_compiled();
388 }
389 
// Locate the static-call stub associated with the direct call at
// 'instruction' by scanning the relocation info covering that address.
// Returns nullptr if no relocation entry matches the exact address.
address CompiledDirectCall::find_stub_for(address instruction) {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)nullptr, instruction);
  while (iter.next()) {
    if (iter.addr() == instruction) {
      switch(iter.type()) {
        case relocInfo::static_call_type:
          return iter.static_call_reloc()->static_stub();
        // We check here for opt_virtual_call_type, since we reuse the code
        // from the CompiledIC implementation
        case relocInfo::opt_virtual_call_type:
          return iter.opt_virtual_call_reloc()->static_stub();
        default:
          ShouldNotReachHere();
      }
    }
  }
  return nullptr;
}
409 
// Convenience wrapper: find the static-call stub for this call instruction.
address CompiledDirectCall::find_stub() {
  return find_stub_for(instruction_address());
}
413 
414 #ifndef PRODUCT
415 void CompiledDirectCall::print() {
416   tty->print("direct call at " INTPTR_FORMAT " to " INTPTR_FORMAT " -> ", p2i(instruction_address()), p2i(destination()));
417   if (is_clean()) {
418     tty->print("clean");
419   } else if (is_call_to_compiled()) {
420     tty->print("compiled");
421   } else if (is_call_to_interpreted()) {
422     tty->print("interpreted");
423   }
424   tty->cr();
425 }
426 
// Debug-only check that re-patching this direct call is MT-safe: the
// previously installed method/destination may only differ from the new one
// in a small set of benign race situations (regenerated lambda forms, class
// unloading, method redefinition), each listed in the asserts below.
void CompiledDirectCall::verify_mt_safe(const methodHandle& callee, address entry,
                                        NativeMovConstReg* method_holder,
                                        NativeJump* jump) {
  _call->verify();
  // A generated lambda form might be deleted from the Lambdaform
  // cache in MethodTypeForm.  If a jit compiled lambdaform method
  // becomes not entrant and the cache access returns null, the new
  // resolve will lead to a new generated LambdaForm.
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  assert(old_method == nullptr || old_method == callee() ||
         callee->is_compiled_lambda_form() ||
         !old_method->method_holder()->is_loader_alive() ||
         old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "a) MT-unsafe modification of inline cache");

  // (address)-1 marks a not-yet-set jump destination on some platforms;
  // otherwise the stub must already point at the entry we are installing.
  address destination = jump->jump_destination();
  assert(destination == (address)-1 || destination == entry
         || old_method == nullptr || !old_method->method_holder()->is_loader_alive() // may have a race due to class unloading.
         || old_method->is_old(),  // may be race patching deoptimized nmethod due to redefinition.
         "b) MT-unsafe modification of inline cache");
}
448 #endif