/*
 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/aotConstantPoolResolver.hpp"
#include "cds/archiveBuilder.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/heapShared.hpp"
#include "classfile/resolutionErrors.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/systemDictionaryShared.hpp"
#include "classfile/vmClasses.hpp"
#include "code/codeCache.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "interpreter/rewriter.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/compressedOops.hpp"
#include "oops/constantPool.inline.hpp"
#include "oops/cpCache.inline.hpp"
#include "oops/method.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomic.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/vm_version.hpp"
#include "utilities/macros.hpp"

// Implementation of ConstantPoolCache

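// Copy a temporary GrowableArray of resolved entries into a metaspace-allocated
// Array owned by the given ClassLoaderData. Returns nullptr if there are no entries.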
template <class T>
static Array<T>* initialize_resolved_entries_array(ClassLoaderData* loader_data, GrowableArray<T> entries, TRAPS) {
  Array<T>* resolved_entries;
  if (entries.length() != 0) {
    resolved_entries = MetadataFactory::new_array<T>(loader_data, entries.length(), CHECK_NULL);
    for (int i = 0; i < entries.length(); i++) {
      resolved_entries->at_put(i, entries.at(i));
    }
    return resolved_entries;
  }
  return nullptr;
}

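// Fill in the ResolvedMethodEntry for a statically bound (direct) or vtable-dispatched
// call. A non-negative vtable_index selects vtable dispatch; a negative one means the
// method can be bound directly.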
void ConstantPoolCache::set_direct_or_vtable_call(Bytecodes::Code invoke_code,
                                                       int method_index,
                                                       const methodHandle& method,
                                                       int vtable_index,
                                                       bool sender_is_interface) {
  bool is_vtable_call = (vtable_index >= 0);  // FIXME: split this method on this boolean
  assert(method->interpreter_entry() != nullptr, "should have been set at this point");
  assert(!method->is_obsolete(),  "attempt to write obsolete method to cpCache");

  int byte_no = -1;
  bool change_to_virtual = false;
  InstanceKlass* holder = nullptr;  // have to declare this outside the switch
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  switch (invoke_code) {
    case Bytecodes::_invokeinterface:
      holder = method->method_holder();
      // check for private interface method invocations
      if (vtable_index == Method::nonvirtual_vtable_index && holder->is_interface() ) {
        assert(method->is_private(), "unexpected non-private method");
        assert(method->can_be_statically_bound(), "unexpected non-statically-bound method");

        method_entry->set_flags((                             1      << ResolvedMethodEntry::is_vfinal_shift) |
                                ((method->is_final_method() ? 1 : 0) << ResolvedMethodEntry::is_final_shift));
        method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
        assert(method_entry->is_vfinal(), "flags must be set");
        method_entry->set_method(method());
        byte_no = 2;
        method_entry->set_klass(holder);
        break;
      }
      else {
        // We get here from InterpreterRuntime::resolve_invoke when an invokeinterface
        // instruction links to a non-interface method (in Object). This can happen when
        // an interface redeclares an Object method (like CharSequence declaring toString())
        // or when invokeinterface is used explicitly.
        // In that case, the method has no itable index and must be invoked as a virtual.
        // Set a flag to keep track of this corner case.
        assert(holder->is_interface() || holder == vmClasses::Object_klass(), "unexpected holder class");
        assert(method->is_public(), "Calling non-public method in Object with invokeinterface");
        change_to_virtual = true;

        // ...and fall through as if we were handling invokevirtual:
      }
    case Bytecodes::_invokevirtual:
      {
        if (!is_vtable_call) {
          assert(method->can_be_statically_bound(), "");
          method_entry->set_flags((                             1      << ResolvedMethodEntry::is_vfinal_shift) |
                                  ((method->is_final_method() ? 1 : 0) << ResolvedMethodEntry::is_final_shift)  |
                                  ((change_to_virtual         ? 1 : 0) << ResolvedMethodEntry::is_forced_virtual_shift));
          method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
          assert(method_entry->is_vfinal(), "flags must be set");
          method_entry->set_method(method());
        } else {
          assert(!method->can_be_statically_bound(), "");
          assert(vtable_index >= 0, "valid index");
          assert(!method->is_final_method(), "sanity");
          method_entry->set_flags((change_to_virtual ? 1 : 0) << ResolvedMethodEntry::is_forced_virtual_shift);
          method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
          assert(!method_entry->is_vfinal(), "flags must not be set");
          method_entry->set_table_index(vtable_index);
        }
        byte_no = 2;
        break;
      }

    case Bytecodes::_invokespecial:
    case Bytecodes::_invokestatic: {
      assert(!is_vtable_call, "");
      // Note:  Read and preserve the value of the is_vfinal flag on any
      // invokevirtual bytecode shared with this constant pool cache entry.
      // It is cheap and safe to consult is_vfinal() at all times.
      // Once is_vfinal is set, it must stay that way, lest we get a dangling oop.
      bool vfinal = method_entry->is_vfinal();
      method_entry->set_flags(((method->is_final_method() ? 1 : 0) << ResolvedMethodEntry::is_final_shift));
      assert(vfinal == method_entry->is_vfinal(), "Vfinal flag must be preserved");
      method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
      method_entry->set_method(method());
      byte_no = 1;
      break;
    }
    default:
      ShouldNotReachHere();
      break;
  }

  // Note:  byte_no also appears in TemplateTable::resolve.
  if (byte_no == 1) {
    assert(invoke_code != Bytecodes::_invokevirtual &&
           invoke_code != Bytecodes::_invokeinterface, "");
    bool do_resolve = true;
    // Don't mark an invokespecial to the method as resolved if the sender is an interface.
    // The receiver must be checked to be a subclass of the current class every time this
    // bytecode is executed.
    if (invoke_code == Bytecodes::_invokespecial && sender_is_interface &&
        method->name() != vmSymbols::object_initializer_name()) {
      do_resolve = false;
    }
    if (invoke_code == Bytecodes::_invokestatic) {
      assert(method->method_holder()->is_initialized() ||
             method->method_holder()->is_reentrant_initialization(JavaThread::current()),
             "invalid class initialization state for invoke_static");

      if (!VM_Version::supports_fast_class_init_checks() && method->needs_clinit_barrier()) {
        // Don't mark an invokestatic to the method as resolved if the holder class has not yet
        // completed initialization. An invokestatic must only proceed once the class is
        // initialized, but if we resolve it earlier the class initialization check is skipped.
        //
        // When fast class initialization checks are supported
        // (VM_Version::supports_fast_class_init_checks() == true), the template interpreter
        // performs a fast class initialization check for invokestatic, which doesn't require
        // call site re-resolution to enforce the class initialization barrier.
        do_resolve = false;
      }
    }
    if (do_resolve) {
      method_entry->set_bytecode1(invoke_code);
    }
  } else if (byte_no == 2)  {
    if (change_to_virtual) {
      assert(invoke_code == Bytecodes::_invokeinterface, "");
      // NOTE: THIS IS A HACK - BE VERY CAREFUL!!!
      //
      // Workaround for the case where we encounter an invokeinterface, but we
      // should really have an _invokevirtual since the resolved method is a
      // virtual method in java.lang.Object. This is a corner case in the spec
      // but is presumably legal. javac does not generate this code.
      //
      // We do not set bytecode_1() to _invokeinterface, because that is the
      // bytecode # used by the interpreter to see if it is resolved.  In this
      // case, the method gets reresolved with caller for each interface call
      // because the actual selected method may not be public.
      //
      // We set bytecode_2() to _invokevirtual.
      // See also interpreterRuntime.cpp. (8/25/2000)
      invoke_code = Bytecodes::_invokevirtual;
    } else {
      assert(invoke_code == Bytecodes::_invokevirtual ||
             (invoke_code == Bytecodes::_invokeinterface &&
              ((method->is_private() ||
                (method->is_final() && method->method_holder() == vmClasses::Object_klass())))),
             "unexpected invocation mode");
      if (invoke_code == Bytecodes::_invokeinterface &&
          (method->is_private() || method->is_final())) {
        // We set bytecode_1() to _invokeinterface, because that is the
        // bytecode # used by the interpreter to see if it is resolved.
        // We set bytecode_2() to _invokevirtual.
        method_entry->set_bytecode1(invoke_code);
      }
    }
    // set up for invokevirtual, even if linking for invokeinterface also:
    method_entry->set_bytecode2(invoke_code);
  } else {
    ShouldNotReachHere();
  }
}

void ConstantPoolCache::set_direct_call(Bytecodes::Code invoke_code, int method_index, const methodHandle& method,
                                        bool sender_is_interface) {
  int index = Method::nonvirtual_vtable_index;
  // index < 0; FIXME: inline and customize set_direct_or_vtable_call
  set_direct_or_vtable_call(invoke_code, method_index, method, index, sender_is_interface);
}

void ConstantPoolCache::set_vtable_call(Bytecodes::Code invoke_code, int method_index, const methodHandle& method, int index) {
  // either the method is a miranda or its holder should accept the given index
  assert(method->method_holder()->is_interface() || method->method_holder()->verify_vtable_index(index), "");
  // index >= 0; FIXME: inline and customize set_direct_or_vtable_call
  set_direct_or_vtable_call(invoke_code, method_index, method, index, false);
}

void ConstantPoolCache::set_itable_call(Bytecodes::Code invoke_code,
                                        int method_index,
                                        Klass* referenced_klass,
                                        const methodHandle& method, int index) {
  assert(method->method_holder()->verify_itable_index(index), "");
  assert(invoke_code == Bytecodes::_invokeinterface, "");
  InstanceKlass* interf = method->method_holder();
  assert(interf->is_interface(), "must be an interface");
  assert(!method->is_final_method(), "interfaces do not have final methods; cannot link to one here");
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  method_entry->set_klass(static_cast<InstanceKlass*>(referenced_klass));
  method_entry->set_method(method());
  method_entry->fill_in((u1)as_TosState(method->result_type()), (u2)method()->size_of_parameters());
  method_entry->set_bytecode1(Bytecodes::_invokeinterface);
}

ResolvedMethodEntry* ConstantPoolCache::set_method_handle(int method_index, const CallInfo &call_info) {
  // NOTE: This method entry can be the subject of data races.
  // There are three words to update: flags, refs[appendix_index], method (in that order).
  // Writers must store all other values before method.
  // Readers must test the method first for non-null before reading other fields.
  // Competing writers must acquire exclusive access via a lock.
  // A losing writer waits on the lock until the winner writes the method and leaves
  // the lock, so that when the losing writer returns, it can use the linked
  // cache entry.

  // Lock fields to write
  Bytecodes::Code invoke_code = Bytecodes::_invokehandle;

  JavaThread* current = JavaThread::current();
  objArrayHandle resolved_references(current, constant_pool()->resolved_references());
  // Use the resolved_references() lock for this cpCache entry.
  // resolved_references are created for all classes with Invokedynamic, MethodHandle
  // or MethodType constant pool cache entries.
  assert(resolved_references() != nullptr,
         "a resolved_references array should have been created for this class");
  ObjectLocker ol(resolved_references, current);

  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  if (method_entry->is_resolved(invoke_code)) {
    return method_entry;
  }

  Method* adapter            = call_info.resolved_method();
  const Handle appendix      = call_info.resolved_appendix();
  const bool has_appendix    = appendix.not_null();

  // Write the flags.
  // MHs are always sig-poly and have a local signature.
  method_entry->fill_in((u1)as_TosState(adapter->result_type()), (u2)adapter->size_of_parameters());
  method_entry->set_flags(((has_appendix    ? 1 : 0) << ResolvedMethodEntry::has_appendix_shift        ) |
                          (                   1      << ResolvedMethodEntry::has_local_signature_shift ) |
                          (                   1      << ResolvedMethodEntry::is_final_shift            ));

  // Method handle invokes use both a method and a resolved references index.
  // refs[appendix_index], if not null, contains a value passed as a trailing argument to the adapter.
  // In the general case, this could be the call site's MethodType,
  // for use with java.lang.invoke.Invokers.checkExactType, or else a CallSite object.
  // method_entry->method() contains the adapter method which manages the actual call.
  // In the general case, this is a compiled LambdaForm.
  // (The Java code is free to optimize these calls by binding other
  // sorts of methods and appendices to call sites.)
  // JVM-level linking is via the method, as if for invokespecial, and signatures are erased.
  // The appendix argument (if any) is added to the signature, and is counted in the parameter_size bits.
  // Even with the appendix, the method will never take more than 255 parameter slots.
  //
  // This means that given a call site like (List)mh.invoke("foo"),
  // the method has signature '(Ljl/Object;Ljl/invoke/MethodType;)Ljl/Object;',
  // not '(Ljava/lang/String;)Ljava/util/List;'.
  // The fact that String and List are involved is encoded in the MethodType in refs[appendix_index].
  // This allows us to create fewer Methods, while keeping type safety.
  //

  // Store appendix, if any.
  if (has_appendix) {
    const int appendix_index = method_entry->resolved_references_index();
    assert(appendix_index >= 0 && appendix_index < resolved_references->length(), "oob");
    assert(resolved_references->obj_at(appendix_index) == nullptr, "init just once");
    resolved_references->obj_at_put(appendix_index, appendix());
  }

  method_entry->set_method(adapter); // This must be the last one to set (see NOTE above)!

  // The interpreter assembly code does not check byte_2,
  // but it is used by is_resolved, method_if_resolved, etc.
  method_entry->set_bytecode1(invoke_code);

  assert(has_appendix == method_entry->has_appendix(), "proper storage of appendix flag");
  assert(method_entry->has_local_signature(), "proper storage of signature flag");
  return method_entry;
}

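// Return the Method* recorded for this entry if it has been resolved, or nullptr if the
// entry is unresolved (or carries only a vtable index whose holder class has not yet been
// resolved in the constant pool).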
Method* ConstantPoolCache::method_if_resolved(int method_index) const {
  // Decode the action of set_method and set_interface_call
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);

  Bytecodes::Code invoke_code = (Bytecodes::Code)method_entry->bytecode1();
  switch (invoke_code) {
    case Bytecodes::_invokeinterface:
    case Bytecodes::_invokestatic:
    case Bytecodes::_invokespecial:
      assert(!method_entry->has_appendix(), "");
      // fall through
    case Bytecodes::_invokehandle:
      return method_entry->method();
    case Bytecodes::_invokedynamic:
      ShouldNotReachHere();
    default:
      assert(invoke_code == (Bytecodes::Code)0, "unexpected bytecode");
      break;
  }

  invoke_code = (Bytecodes::Code)method_entry->bytecode2();
  if (invoke_code == Bytecodes::_invokevirtual) {
    if (method_entry->is_vfinal()) {
      return method_entry->method();
    } else {
      int holder_index = constant_pool()->uncached_klass_ref_index_at(method_entry->constant_pool_index());
      if (constant_pool()->tag_at(holder_index).is_klass()) {
        Klass* klass = constant_pool()->resolved_klass_at(holder_index);
        return klass->method_at_vtable(method_entry->table_index());
      }
    }
  }
  return nullptr;
}

ConstantPoolCache* ConstantPoolCache::allocate(ClassLoaderData* loader_data,
                                     const intStack& invokedynamic_map,
                                     const GrowableArray<ResolvedIndyEntry> indy_entries,
                                     const GrowableArray<ResolvedFieldEntry> field_entries,
                                     const GrowableArray<ResolvedMethodEntry> method_entries,
                                     TRAPS) {

  int size = ConstantPoolCache::size();

  // Initialize resolved entry arrays with available data
  Array<ResolvedFieldEntry>* resolved_field_entries = initialize_resolved_entries_array(loader_data, field_entries, CHECK_NULL);
  Array<ResolvedIndyEntry>* resolved_indy_entries = initialize_resolved_entries_array(loader_data, indy_entries, CHECK_NULL);
  Array<ResolvedMethodEntry>* resolved_method_entries = initialize_resolved_entries_array(loader_data, method_entries, CHECK_NULL);

  return new (loader_data, size, MetaspaceObj::ConstantPoolCacheType, THREAD)
              ConstantPoolCache(invokedynamic_map, resolved_indy_entries, resolved_field_entries, resolved_method_entries);
}

// Record the GC marking cycle when redefined vs. when found in the loom stack chunks.
void ConstantPoolCache::record_gc_epoch() {
  _gc_epoch = CodeCache::gc_epoch();
}

#if INCLUDE_CDS
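// CDS support: revert any resolved entries whose resolution cannot be reproduced
// deterministically at runtime, so that only safely archivable state is written out.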
void ConstantPoolCache::remove_unshareable_info() {
  assert(CDSConfig::is_dumping_archive(), "sanity");

  if (_resolved_indy_entries != nullptr) {
    remove_resolved_indy_entries_if_non_deterministic();
  }
  if (_resolved_field_entries != nullptr) {
    remove_resolved_field_entries_if_non_deterministic();
  }
  if (_resolved_method_entries != nullptr) {
    remove_resolved_method_entries_if_non_deterministic();
  }

#if INCLUDE_CDS_JAVA_HEAP
  _archived_references_index = -1;
  if (CDSConfig::is_dumping_heap()) {
    ConstantPool* src_cp = ArchiveBuilder::current()->get_source_addr(constant_pool());
    oop rr = HeapShared::scratch_resolved_references(src_cp);
    if (rr != nullptr) {
      _archived_references_index = HeapShared::append_root(rr);
    }
  }
#endif
}

void ConstantPoolCache::remove_resolved_field_entries_if_non_deterministic() {
  ConstantPool* cp = constant_pool();
  ConstantPool* src_cp =  ArchiveBuilder::current()->get_source_addr(cp);
  for (int i = 0; i < _resolved_field_entries->length(); i++) {
    ResolvedFieldEntry* rfi = _resolved_field_entries->adr_at(i);
    int cp_index = rfi->constant_pool_index();
    bool archived = false;
    bool resolved = rfi->is_resolved(Bytecodes::_getfield)  ||
                    rfi->is_resolved(Bytecodes::_putfield);
    if (resolved && AOTConstantPoolResolver::is_resolution_deterministic(src_cp, cp_index)) {
      rfi->mark_and_relocate();
      archived = true;
    } else {
      rfi->remove_unshareable_info();
    }
    if (resolved) {
      LogStreamHandle(Trace, cds, resolve) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        int klass_cp_index = cp->uncached_klass_ref_index_at(cp_index);
        Symbol* klass_name = cp->klass_name_at(klass_cp_index);
        Symbol* name = cp->uncached_name_ref_at(cp_index);
        Symbol* signature = cp->uncached_signature_ref_at(cp_index);
        log.print("%s field  CP entry [%3d]: %s => %s.%s:%s",
                  (archived ? "archived" : "reverted"),
                  cp_index,
                  cp->pool_holder()->name()->as_C_string(),
                  klass_name->as_C_string(), name->as_C_string(), signature->as_C_string());
      }
    }
    ArchiveBuilder::alloc_stats()->record_field_cp_entry(archived, resolved && !archived);
  }
}

void ConstantPoolCache::remove_resolved_method_entries_if_non_deterministic() {
  ConstantPool* cp = constant_pool();
  ConstantPool* src_cp =  ArchiveBuilder::current()->get_source_addr(cp);
  for (int i = 0; i < _resolved_method_entries->length(); i++) {
    ResolvedMethodEntry* rme = _resolved_method_entries->adr_at(i);
    int cp_index = rme->constant_pool_index();
    bool archived = false;
    bool resolved = rme->is_resolved(Bytecodes::_invokevirtual)   ||
                    rme->is_resolved(Bytecodes::_invokespecial)   ||
                    rme->is_resolved(Bytecodes::_invokeinterface) ||
                    rme->is_resolved(Bytecodes::_invokehandle);

    // Just for safety -- this should not happen, but do not archive if we ever see this.
    resolved &= !(rme->is_resolved(Bytecodes::_invokestatic));

    if (resolved && can_archive_resolved_method(src_cp, rme)) {
      rme->mark_and_relocate(src_cp);
      archived = true;
    } else {
      rme->remove_unshareable_info();
    }
    if (resolved) {
      LogStreamHandle(Trace, cds, resolve) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        int klass_cp_index = cp->uncached_klass_ref_index_at(cp_index);
        Symbol* klass_name = cp->klass_name_at(klass_cp_index);
        Symbol* name = cp->uncached_name_ref_at(cp_index);
        Symbol* signature = cp->uncached_signature_ref_at(cp_index);
        log.print("%s%s method CP entry [%3d]: %s %s.%s:%s",
                  (archived ? "archived" : "reverted"),
                  (rme->is_resolved(Bytecodes::_invokeinterface) ? " interface" : ""),
                  cp_index,
                  cp->pool_holder()->name()->as_C_string(),
                  klass_name->as_C_string(), name->as_C_string(), signature->as_C_string());
        if (archived) {
          Klass* resolved_klass = cp->resolved_klass_at(klass_cp_index);
          log.print(" => %s%s",
                    resolved_klass->name()->as_C_string(),
                    (rme->is_resolved(Bytecodes::_invokestatic) ? " *** static" : ""));
        }
      }
      ArchiveBuilder::alloc_stats()->record_method_cp_entry(archived, resolved && !archived);
    }
  }
}

void ConstantPoolCache::remove_resolved_indy_entries_if_non_deterministic() {
  ConstantPool* cp = constant_pool();
  ConstantPool* src_cp =  ArchiveBuilder::current()->get_source_addr(cp);
  for (int i = 0; i < _resolved_indy_entries->length(); i++) {
    ResolvedIndyEntry* rei = _resolved_indy_entries->adr_at(i);
    int cp_index = rei->constant_pool_index();
    bool archived = false;
    bool resolved = rei->is_resolved();
    if (resolved && AOTConstantPoolResolver::is_resolution_deterministic(src_cp, cp_index)) {
      rei->mark_and_relocate();
      archived = true;
    } else {
      rei->remove_unshareable_info();
    }
    if (resolved) {
      LogStreamHandle(Trace, cds, resolve) log;
      if (log.is_enabled()) {
        ResourceMark rm;
        int bsm = cp->bootstrap_method_ref_index_at(cp_index);
        int bsm_ref = cp->method_handle_index_at(bsm);
        Symbol* bsm_name = cp->uncached_name_ref_at(bsm_ref);
        Symbol* bsm_signature = cp->uncached_signature_ref_at(bsm_ref);
        Symbol* bsm_klass = cp->klass_name_at(cp->uncached_klass_ref_index_at(bsm_ref));
        log.print("%s indy   CP entry [%3d]: %s (%d)",
                  (archived ? "archived" : "reverted"),
                  cp_index, cp->pool_holder()->name()->as_C_string(), i);
        log.print(" %s %s.%s:%s", (archived ? "=>" : "  "), bsm_klass->as_C_string(),
                  bsm_name->as_C_string(), bsm_signature->as_C_string());
      }
      ArchiveBuilder::alloc_stats()->record_indy_cp_entry(archived, resolved && !archived);
    }
  }
}

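// Returns true if the given resolved method entry can be stored in the archive as-is:
// the holder must come from a builtin loader, the resolution must be deterministic,
// and the invocation kind must be one we know how to archive.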
bool ConstantPoolCache::can_archive_resolved_method(ConstantPool* src_cp, ResolvedMethodEntry* method_entry) {
  InstanceKlass* pool_holder = constant_pool()->pool_holder();
  if (!(pool_holder->is_shared_boot_class() || pool_holder->is_shared_platform_class() ||
        pool_holder->is_shared_app_class())) {
    // Archiving resolved cp entries for classes from non-builtin loaders
    // is not yet supported.
    return false;
  }

  if (CDSConfig::is_dumping_dynamic_archive()) {
    // InstanceKlass::methods() has been resorted. We need to
    // update the vtable_index in method_entry (not implemented)
    return false;
  }

  if (!method_entry->is_resolved(Bytecodes::_invokevirtual)) {
    if (method_entry->method() == nullptr) {
      return false;
    }
    if (method_entry->method()->is_continuation_native_intrinsic()) {
      return false; // FIXME: corresponding stub is generated on demand during method resolution (see LinkResolver::resolve_static_call).
    }
  }

  int cp_index = method_entry->constant_pool_index();
  assert(src_cp->tag_at(cp_index).is_method() || src_cp->tag_at(cp_index).is_interface_method(), "sanity");

  if (!AOTConstantPoolResolver::is_resolution_deterministic(src_cp, cp_index)) {
    return false;
  }

  if (method_entry->is_resolved(Bytecodes::_invokeinterface) ||
      method_entry->is_resolved(Bytecodes::_invokevirtual) ||
      method_entry->is_resolved(Bytecodes::_invokespecial)) {
    return true;
  } else if (method_entry->is_resolved(Bytecodes::_invokehandle)) {
    if (CDSConfig::is_dumping_method_handles()) {
      // invokehandle depends on archived MethodType and LambdaForms.
      return true;
    } else {
      return false;
    }
  } else {
    return false;
  }
}
#endif // INCLUDE_CDS

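// Release the metadata owned by this cache (reference map and resolved entry arrays)
// and the resolved_references handle when the holder class is deallocated.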
void ConstantPoolCache::deallocate_contents(ClassLoaderData* data) {
  assert(!is_shared(), "shared caches are not deallocated");
  data->remove_handle(_resolved_references);
  set_resolved_references(OopHandle());
  MetadataFactory::free_array<u2>(data, _reference_map);
  set_reference_map(nullptr);
#if INCLUDE_CDS
  if (_resolved_indy_entries != nullptr) {
    MetadataFactory::free_array<ResolvedIndyEntry>(data, _resolved_indy_entries);
    _resolved_indy_entries = nullptr;
  }
  if (_resolved_field_entries != nullptr) {
    MetadataFactory::free_array<ResolvedFieldEntry>(data, _resolved_field_entries);
    _resolved_field_entries = nullptr;
  }
  if (_resolved_method_entries != nullptr) {
    MetadataFactory::free_array<ResolvedMethodEntry>(data, _resolved_method_entries);
    _resolved_method_entries = nullptr;
  }
#endif
}

#if INCLUDE_CDS_JAVA_HEAP
oop ConstantPoolCache::archived_references() {
  if (_archived_references_index < 0) {
    return nullptr;
  }
  return HeapShared::get_root(_archived_references_index);
}

void ConstantPoolCache::clear_archived_references() {
  if (_archived_references_index >= 0) {
    HeapShared::clear_root(_archived_references_index);
    _archived_references_index = -1;
  }
}
#endif

#if INCLUDE_JVMTI
static void log_adjust(const char* entry_type, Method* old_method, Method* new_method, bool* trace_name_printed) {
  ResourceMark rm;

  if (!(*trace_name_printed)) {
    log_info(redefine, class, update)("adjust: name=%s", old_method->method_holder()->external_name());
    *trace_name_printed = true;
  }
  log_trace(redefine, class, update, constantpool)
    ("cpc %s entry update: %s", entry_type, new_method->external_name());
}

// RedefineClasses() API support:
// If any entry of this ConstantPoolCache points to any of
// old_methods, replace it with the corresponding new_method.
void ConstantPoolCache::adjust_method_entries(bool * trace_name_printed) {
  if (_resolved_indy_entries != nullptr) {
    for (int j = 0; j < _resolved_indy_entries->length(); j++) {
      Method* old_method = resolved_indy_entry_at(j)->method();
      if (old_method == nullptr || !old_method->is_old()) {
        continue;
      }
      assert(!old_method->is_deleted(), "cannot delete these methods");
      Method* new_method = old_method->get_new_method();
      resolved_indy_entry_at(j)->adjust_method_entry(new_method);
      log_adjust("indy", old_method, new_method, trace_name_printed);
    }
  }
  if (_resolved_method_entries != nullptr) {
    for (int i = 0; i < _resolved_method_entries->length(); i++) {
      ResolvedMethodEntry* method_entry = resolved_method_entry_at(i);
      // get interesting method entry
      Method* old_method = method_entry->method();
      if (old_method == nullptr || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      if (old_method->is_deleted()) {
        // clean up entries with deleted methods
        method_entry->reset_entry();
        continue;
      }
      Method* new_method = old_method->get_new_method();
      method_entry->adjust_method_entry(new_method);
      log_adjust("non-indy", old_method, new_method, trace_name_printed);
    }
  }
}

// the constant pool cache should never contain old or obsolete methods
bool ConstantPoolCache::check_no_old_or_obsolete_entries() {
  ResourceMark rm;
  if (_resolved_indy_entries != nullptr) {
    for (int i = 0; i < _resolved_indy_entries->length(); i++) {
      Method* m = resolved_indy_entry_at(i)->method();
      if (m != nullptr && !resolved_indy_entry_at(i)->check_no_old_or_obsolete_entry()) {
        log_trace(redefine, class, update, constantpool)
          ("cpcache check found old method entry: class: %s, old: %d, obsolete: %d, method: %s",
           constant_pool()->pool_holder()->external_name(), m->is_old(), m->is_obsolete(), m->external_name());
        return false;
      }
    }
  }
  if (_resolved_method_entries != nullptr) {
    for (int i = 0; i < _resolved_method_entries->length(); i++) {
      ResolvedMethodEntry* method_entry = resolved_method_entry_at(i);
      Method* m = method_entry->method();
      if (m != nullptr && !method_entry->check_no_old_or_obsolete_entry()) {
        log_trace(redefine, class, update, constantpool)
          ("cpcache check found old method entry: class: %s, old: %d, obsolete: %d, method: %s",
           constant_pool()->pool_holder()->external_name(), m->is_old(), m->is_obsolete(), m->external_name());
        return false;
      }
    }
  }
  return true;
}

void ConstantPoolCache::dump_cache() {
  print_on(tty);
}
#endif // INCLUDE_JVMTI

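// Visit the metaspace pointers embedded in this cache so they can be relocated,
// e.g. when the CDS archive builder copies metadata.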
void ConstantPoolCache::metaspace_pointers_do(MetaspaceClosure* it) {
  log_trace(cds)("Iter(ConstantPoolCache): %p", this);
  it->push(&_constant_pool);
  it->push(&_reference_map);
  if (_resolved_indy_entries != nullptr) {
    it->push(&_resolved_indy_entries, MetaspaceClosure::_writable);
  }
  if (_resolved_field_entries != nullptr) {
    it->push(&_resolved_field_entries, MetaspaceClosure::_writable);
  }
  if (_resolved_method_entries != nullptr) {
    it->push(&_resolved_method_entries, MetaspaceClosure::_writable);
  }
}

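// Record a pending LinkageError from invokedynamic resolution so it can be rethrown on
// later resolution attempts. Returns false (and clears the pending exception) if another
// thread already resolved the entry or recorded a failure first.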
bool ConstantPoolCache::save_and_throw_indy_exc(
  const constantPoolHandle& cpool, int cpool_index, int index, constantTag tag, TRAPS) {

  assert(HAS_PENDING_EXCEPTION, "No exception got thrown!");
  assert(PENDING_EXCEPTION->is_a(vmClasses::LinkageError_klass()),
         "No LinkageError exception");

  // Use the resolved_references() lock for this cpCache entry.
  // resolved_references are created for all classes with Invokedynamic, MethodHandle
  // or MethodType constant pool cache entries.
  JavaThread* current = THREAD;
  objArrayHandle resolved_references(current, cpool->resolved_references());
  assert(resolved_references() != nullptr,
         "a resolved_references array should have been created for this class");
  ObjectLocker ol(resolved_references, current);

  // if the indy_info is resolved or the indy_resolution_failed flag is set then another
  // thread either succeeded in resolving the method or got a LinkageError
  // exception, before this thread was able to record its failure.  So, clear
  // this thread's exception and return false so caller can use the earlier
  // thread's result.
  if (resolved_indy_entry_at(index)->is_resolved() || resolved_indy_entry_at(index)->resolution_failed()) {
    CLEAR_PENDING_EXCEPTION;
    return false;
  }
  ResourceMark rm(THREAD);
  Symbol* error = PENDING_EXCEPTION->klass()->name();
  const char* message = java_lang_Throwable::message_as_utf8(PENDING_EXCEPTION);

  int encoded_index = ResolutionErrorTable::encode_indy_index(index);
  SystemDictionary::add_resolution_error(cpool, encoded_index, error, message);
  resolved_indy_entry_at(index)->set_resolution_failed();
  return true;
}

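// Record the result of resolving an invokedynamic call site: store the appendix (if any)
// in resolved_references and fill in the ResolvedIndyEntry. Returns the appendix, the
// previously recorded reference if another thread won the race, or nullptr if an earlier
// resolution failure is rethrown.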
oop ConstantPoolCache::set_dynamic_call(const CallInfo &call_info, int index) {
  ResourceMark rm;

  // Use the resolved_references() lock for this cpCache entry.
  // resolved_references are created for all classes with Invokedynamic, MethodHandle
  // or MethodType constant pool cache entries.
  JavaThread* current = JavaThread::current();
  constantPoolHandle cp(current, constant_pool());

  objArrayHandle resolved_references(current, cp->resolved_references());
  assert(resolved_references() != nullptr,
         "a resolved_references array should have been created for this class");
  ObjectLocker ol(resolved_references, current);
  assert(index >= 0, "Indy index must be non-negative at this point");

  if (resolved_indy_entry_at(index)->method() != nullptr) {
    return cp->resolved_reference_from_indy(index);
  }

  if (resolved_indy_entry_at(index)->resolution_failed()) {
    // Before we got here, another thread got a LinkageError exception during
    // resolution.  Ignore our success and throw their exception.
    guarantee(index >= 0, "Invalid indy index");
    int encoded_index = ResolutionErrorTable::encode_indy_index(index);
    ConstantPool::throw_resolution_error(cp, encoded_index, current);
    return nullptr;
  }

  Method* adapter            = call_info.resolved_method();
  const Handle appendix      = call_info.resolved_appendix();
  const bool has_appendix    = appendix.not_null();

  LogStream* log_stream = nullptr;
  LogStreamHandle(Debug, methodhandles, indy) lsh_indy;
  if (lsh_indy.is_enabled()) {
    ResourceMark rm;
    log_stream = &lsh_indy;
    log_stream->print_cr("set_method_handle bc=%d appendix=" PTR_FORMAT "%s method=" PTR_FORMAT " (local signature) ",
                         0xba,
                         p2i(appendix()),
                         (has_appendix ? "" : " (unused)"),
                         p2i(adapter));
    adapter->print_on(log_stream);
    if (has_appendix)  appendix()->print_on(log_stream);
  }

  if (has_appendix) {
    const int appendix_index = resolved_indy_entry_at(index)->resolved_references_index();
    assert(appendix_index >= 0 && appendix_index < resolved_references->length(), "oob");
    assert(resolved_references->obj_at(appendix_index) == nullptr, "init just once");
    resolved_references->obj_at_put(appendix_index, appendix());
  }

  // Populate entry with resolved information
  assert(resolved_indy_entries() != nullptr, "Invokedynamic array is empty, cannot fill with resolved information");
  resolved_indy_entry_at(index)->fill_in(adapter, adapter->size_of_parameters(), as_TosState(adapter->result_type()), has_appendix);

  if (log_stream != nullptr) {
    resolved_indy_entry_at(index)->print_on(log_stream);
  }
  return appendix();
}

oop ConstantPoolCache::appendix_if_resolved(int method_index) const {
  ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
  return appendix_if_resolved(method_entry);
}

oop ConstantPoolCache::appendix_if_resolved(ResolvedMethodEntry* method_entry) const {
  if (!method_entry->has_appendix())
    return nullptr;
  const int ref_index = method_entry->resolved_references_index();
  return constant_pool()->resolved_reference_at(ref_index);
}

// Printing

void ConstantPoolCache::print_on(outputStream* st) const {
  st->print_cr("%s", internal_name());
  // print constant pool cache entries
  if (_resolved_field_entries != nullptr) {
    print_resolved_field_entries(st);
  }
  if (_resolved_method_entries != nullptr) {
    print_resolved_method_entries(st);
  }
  if (_resolved_indy_entries != nullptr) {
    print_resolved_indy_entries(st);
  }
}

void ConstantPoolCache::print_resolved_field_entries(outputStream* st) const {
  for (int field_index = 0; field_index < resolved_field_entries_length(); field_index++) {
    resolved_field_entry_at(field_index)->print_on(st);
  }
}

void ConstantPoolCache::print_resolved_method_entries(outputStream* st) const {
  for (int method_index = 0; method_index < resolved_method_entries_length(); method_index++) {
    ResolvedMethodEntry* method_entry = resolved_method_entry_at(method_index);
    method_entry->print_on(st);
    if (method_entry->has_appendix()) {
      st->print("  appendix: ");
      constant_pool()->resolved_reference_from_method(method_index)->print_on(st);
    }
  }
}

void ConstantPoolCache::print_resolved_indy_entries(outputStream* st) const {
  for (int indy_index = 0; indy_index < resolved_indy_entries_length(); indy_index++) {
    ResolvedIndyEntry* indy_entry = resolved_indy_entry_at(indy_index);
    indy_entry->print_on(st);
    if (indy_entry->has_appendix()) {
      st->print("  appendix: ");
      constant_pool()->resolved_reference_from_indy(indy_index)->print_on(st);
    }
  }
}