1 /*
2 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotClassInitializer.hpp"
26 #include "cds/aotMetaspace.hpp"
27 #include "cds/archiveUtils.hpp"
28 #include "cds/cdsConfig.hpp"
29 #include "cds/cdsEnumKlass.hpp"
30 #include "cds/classListWriter.hpp"
31 #include "cds/heapShared.hpp"
32 #include "classfile/classFileParser.hpp"
33 #include "classfile/classFileStream.hpp"
34 #include "classfile/classLoader.hpp"
35 #include "classfile/classLoaderData.inline.hpp"
36 #include "classfile/javaClasses.hpp"
37 #include "classfile/moduleEntry.hpp"
38 #include "classfile/systemDictionary.hpp"
39 #include "classfile/systemDictionaryShared.hpp"
40 #include "classfile/verifier.hpp"
41 #include "classfile/vmClasses.hpp"
42 #include "classfile/vmSymbols.hpp"
43 #include "code/codeCache.hpp"
44 #include "code/dependencyContext.hpp"
45 #include "compiler/compilationPolicy.hpp"
46 #include "compiler/compileBroker.hpp"
47 #include "gc/shared/collectedHeap.inline.hpp"
48 #include "interpreter/bytecodeStream.hpp"
49 #include "interpreter/oopMapCache.hpp"
50 #include "interpreter/rewriter.hpp"
51 #include "jvm.h"
52 #include "jvmtifiles/jvmti.h"
54 #include "logging/log.hpp"
55 #include "logging/logMessage.hpp"
56 #include "logging/logStream.hpp"
57 #include "memory/allocation.inline.hpp"
58 #include "memory/iterator.inline.hpp"
59 #include "memory/metadataFactory.hpp"
60 #include "memory/metaspaceClosure.hpp"
61 #include "memory/oopFactory.hpp"
62 #include "memory/resourceArea.hpp"
63 #include "memory/universe.hpp"
64 #include "oops/constantPool.hpp"
65 #include "oops/fieldStreams.inline.hpp"
66 #include "oops/instanceClassLoaderKlass.hpp"
67 #include "oops/instanceKlass.inline.hpp"
68 #include "oops/instanceMirrorKlass.hpp"
69 #include "oops/instanceOop.hpp"
70 #include "oops/instanceStackChunkKlass.hpp"
71 #include "oops/klass.inline.hpp"
72 #include "oops/method.hpp"
73 #include "oops/oop.inline.hpp"
74 #include "oops/recordComponent.hpp"
75 #include "oops/symbol.hpp"
76 #include "prims/jvmtiExport.hpp"
77 #include "prims/jvmtiRedefineClasses.hpp"
78 #include "prims/jvmtiThreadState.hpp"
79 #include "prims/methodComparator.hpp"
80 #include "runtime/arguments.hpp"
81 #include "runtime/atomicAccess.hpp"
82 #include "runtime/deoptimization.hpp"
83 #include "runtime/fieldDescriptor.inline.hpp"
84 #include "runtime/handles.inline.hpp"
85 #include "runtime/javaCalls.hpp"
86 #include "runtime/javaThread.inline.hpp"
87 #include "runtime/mutexLocker.hpp"
88 #include "runtime/orderAccess.hpp"
89 #include "runtime/os.inline.hpp"
90 #include "runtime/reflection.hpp"
91 #include "runtime/synchronizer.hpp"
92 #include "runtime/threads.hpp"
93 #include "services/classLoadingService.hpp"
94 #include "services/finalizerService.hpp"
95 #include "services/threadService.hpp"
96 #include "utilities/dtrace.hpp"
97 #include "utilities/events.hpp"
98 #include "utilities/macros.hpp"
99 #include "utilities/nativeStackPrinter.hpp"
100 #include "utilities/stringUtils.hpp"
101 #ifdef COMPILER1
102 #include "c1/c1_Compiler.hpp"
103 #endif
104 #if INCLUDE_JFR
105 #include "jfr/jfrEvents.hpp"
106 #endif
107
108 #ifdef DTRACE_ENABLED
109
110
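// The macros below alias the lowercase probe names used at the call sites
// (e.g. DTRACE_CLASSINIT_PROBE(required, -1)) onto the generated uppercase
// HOTSPOT_CLASS_INITIALIZATION_* probes, passing the class name and length,
// the class loader, the thread type and, for the _WAIT variant, whether this
// thread had to wait for another initializer.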
111 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
112 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
113 #define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
114 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
115 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
116 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
117 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
118 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
119 #define DTRACE_CLASSINIT_PROBE(type, thread_type) \
120 { \
121 char* data = nullptr; \
122 int len = 0; \
123 Symbol* clss_name = name(); \
124 if (clss_name != nullptr) { \
125 data = (char*)clss_name->bytes(); \
126 len = clss_name->utf8_length(); \
127 } \
128 HOTSPOT_CLASS_INITIALIZATION_##type( \
129 data, len, (void*)class_loader(), thread_type); \
130 }
131
132 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \
133 { \
134 char* data = nullptr; \
135 int len = 0; \
136 Symbol* clss_name = name(); \
137 if (clss_name != nullptr) { \
138 data = (char*)clss_name->bytes(); \
139 len = clss_name->utf8_length(); \
140 } \
141 HOTSPOT_CLASS_INITIALIZATION_##type( \
142 data, len, (void*)class_loader(), thread_type, wait); \
143 }
144
145 #else // ndef DTRACE_ENABLED
146
147 #define DTRACE_CLASSINIT_PROBE(type, thread_type)
148 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
149
150 #endif // ndef DTRACE_ENABLED
151
152 bool InstanceKlass::_finalization_enabled = true;
153
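// Helper for allocate_instance_klass() below: true if the class being parsed
// is java.lang.ClassLoader itself or, once ClassLoader has been loaded, any
// subclass of it.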
154 static inline bool is_class_loader(const Symbol* class_name,
155 const ClassFileParser& parser) {
156 assert(class_name != nullptr, "invariant");
157
158 if (class_name == vmSymbols::java_lang_ClassLoader()) {
159 return true;
160 }
161
162 if (vmClasses::ClassLoader_klass_is_loaded()) {
163 const Klass* const super_klass = parser.super_klass();
164 if (super_klass != nullptr) {
165 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
166 return true;
167 }
168 }
169 }
170 return false;
171 }
172
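// Helper for allocate_instance_klass() below: true for the boot-loaded
// jdk.internal.vm.StackChunk class.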
173 static inline bool is_stack_chunk_class(const Symbol* class_name,
174 const ClassLoaderData* loader_data) {
175 return (class_name == vmSymbols::jdk_internal_vm_StackChunk() &&
176 loader_data->is_the_null_class_loader_data());
177 }
178
179 // private: called to verify that k is a static member of this nest.
180 // We know that k is an instance class in the same package and hence the
181 // same classloader.
182 bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
183 assert(!is_hidden(), "unexpected hidden class");
184 if (_nest_members == nullptr || _nest_members == Universe::the_empty_short_array()) {
185 if (log_is_enabled(Trace, class, nestmates)) {
186 ResourceMark rm(current);
187 log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
188 k->external_name(), this->external_name());
189 }
190 return false;
191 }
192
193 if (log_is_enabled(Trace, class, nestmates)) {
194 ResourceMark rm(current);
195 log_trace(class, nestmates)("Checking nest membership of %s in %s",
196 k->external_name(), this->external_name());
197 }
198
199 // Check for the named class in _nest_members.
200 // We don't resolve, or load, any classes.
201 for (int i = 0; i < _nest_members->length(); i++) {
202 int cp_index = _nest_members->at(i);
203 Symbol* name = _constants->klass_name_at(cp_index);
204 if (name == k->name()) {
205 log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
206 return true;
207 }
208 }
209 log_trace(class, nestmates)("- class is NOT a nest member!");
210 return false;
211 }
212
213 // Called to verify that k is a permitted subclass of this class.
214 // The incoming stringStream is used to format the messages for error logging and for the caller
215 // to use for exception throwing.
216 bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k, stringStream& ss) const {
217 Thread* current = Thread::current();
218 assert(k != nullptr, "sanity check");
219 assert(_permitted_subclasses != nullptr && _permitted_subclasses != Universe::the_empty_short_array(),
220 "unexpected empty _permitted_subclasses array");
221
222 if (log_is_enabled(Trace, class, sealed)) {
223 ResourceMark rm(current);
224 log_trace(class, sealed)("Checking for permitted subclass %s in %s",
225 k->external_name(), this->external_name());
226 }
227
228 // Check that the class and its super are in the same module.
229 if (k->module() != this->module()) {
230 ss.print("Failed same module check: subclass %s is in module '%s' with loader %s, "
231 "and sealed class %s is in module '%s' with loader %s",
232 k->external_name(),
233 k->module()->name_as_C_string(),
234 k->module()->loader_data()->loader_name_and_id(),
235 this->external_name(),
236 this->module()->name_as_C_string(),
237 this->module()->loader_data()->loader_name_and_id());
238 log_trace(class, sealed)(" - %s", ss.as_string());
239 return false;
240 }
241
242 if (!k->is_public() && !is_same_class_package(k)) {
243 ss.print("Failed same package check: non-public subclass %s is in package '%s' with classloader %s, "
244 "and sealed class %s is in package '%s' with classloader %s",
245 k->external_name(),
246 k->package() != nullptr ? k->package()->name()->as_C_string() : "unnamed",
247 k->module()->loader_data()->loader_name_and_id(),
248 this->external_name(),
249 this->package() != nullptr ? this->package()->name()->as_C_string() : "unnamed",
250 this->module()->loader_data()->loader_name_and_id());
251 log_trace(class, sealed)(" - %s", ss.as_string());
252 return false;
253 }
254
255 for (int i = 0; i < _permitted_subclasses->length(); i++) {
256 int cp_index = _permitted_subclasses->at(i);
257 Symbol* name = _constants->klass_name_at(cp_index);
258 if (name == k->name()) {
259 log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
260 return true;
261 }
262 }
263
264 ss.print("Failed listed permitted subclass check: class %s is not a permitted subclass of %s",
265 k->external_name(), this->external_name());
266 log_trace(class, sealed)(" - %s", ss.as_string());
267 return false;
268 }
269
270 // Return nest-host class, resolving, validating and saving it if needed.
// If this is called from a thread that cannot do classloading (such as a
// compiler thread) then we simply return null, which in turn
273 // causes the access check to return false. Such code will retry the access
274 // from a more suitable environment later. Otherwise the _nest_host is always
275 // set once this method returns.
276 // Any errors from nest-host resolution must be preserved so they can be queried
277 // from higher-level access checking code, and reported as part of access checking
278 // exceptions.
279 // VirtualMachineErrors are propagated with a null return.
280 // Under any conditions where the _nest_host can be set to non-null the resulting
281 // value of it and, if applicable, the nest host resolution/validation error,
282 // are idempotent.
283 InstanceKlass* InstanceKlass::nest_host(TRAPS) {
284 InstanceKlass* nest_host_k = _nest_host;
285 if (nest_host_k != nullptr) {
286 return nest_host_k;
287 }
288
289 ResourceMark rm(THREAD);
290
291 // need to resolve and save our nest-host class.
292 if (_nest_host_index != 0) { // we have a real nest_host
293 // Before trying to resolve check if we're in a suitable context
294 bool can_resolve = THREAD->can_call_java();
295 if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
296 log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
297 this->external_name());
298 return nullptr; // sentinel to say "try again from a different context"
299 }
300
301 log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
302 this->external_name(),
303 _constants->klass_name_at(_nest_host_index)->as_C_string());
304
305 Klass* k = _constants->klass_at(_nest_host_index, THREAD);
306 if (HAS_PENDING_EXCEPTION) {
307 if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
308 return nullptr; // propagate VMEs
309 }
310 stringStream ss;
311 char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
312 ss.print("Nest host resolution of %s with host %s failed: ",
313 this->external_name(), target_host_class);
314 java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
315 const char* msg = ss.as_string(true /* on C-heap */);
316 constantPoolHandle cph(THREAD, constants());
317 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
318 CLEAR_PENDING_EXCEPTION;
319
320 log_trace(class, nestmates)("%s", msg);
321 } else {
322 // A valid nest-host is an instance class in the current package that lists this
323 // class as a nest member. If any of these conditions are not met the class is
324 // its own nest-host.
325 const char* error = nullptr;
326
327 // JVMS 5.4.4 indicates package check comes first
328 if (is_same_class_package(k)) {
329 // Now check actual membership. We can't be a member if our "host" is
330 // not an instance class.
331 if (k->is_instance_klass()) {
332 nest_host_k = InstanceKlass::cast(k);
333 bool is_member = nest_host_k->has_nest_member(THREAD, this);
334 if (is_member) {
335 _nest_host = nest_host_k; // save resolved nest-host value
336
337 log_trace(class, nestmates)("Resolved nest-host of %s to %s",
338 this->external_name(), k->external_name());
339 return nest_host_k;
340 } else {
341 error = "current type is not listed as a nest member";
342 }
343 } else {
344 error = "host is not an instance class";
345 }
346 } else {
347 error = "types are in different packages";
348 }
349
350 // something went wrong, so record what and log it
351 {
352 stringStream ss;
353 ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
354 this->external_name(),
355 this->class_loader_data()->loader_name_and_id(),
356 k->external_name(),
357 k->class_loader_data()->loader_name_and_id(),
358 error);
359 const char* msg = ss.as_string(true /* on C-heap */);
360 constantPoolHandle cph(THREAD, constants());
361 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
362 log_trace(class, nestmates)("%s", msg);
363 }
364 }
365 } else {
366 log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
367 this->external_name());
368 }
369
370 // Either not in an explicit nest, or else an error occurred, so
371 // the nest-host is set to `this`. Any thread that sees this assignment
372 // will also see any setting of nest_host_error(), if applicable.
373 return (_nest_host = this);
374 }
375
376 // Dynamic nest member support: set this class's nest host to the given class.
377 // This occurs as part of the class definition, as soon as the instanceKlass
378 // has been created and doesn't require further resolution. The code:
379 // lookup().defineHiddenClass(bytes_for_X, NESTMATE);
380 // results in:
381 // class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
382 // If it has an explicit _nest_host_index or _nest_members, these will be ignored.
383 // We also know the "host" is a valid nest-host in the same package so we can
384 // assert some of those facts.
385 void InstanceKlass::set_nest_host(InstanceKlass* host) {
386 assert(is_hidden(), "must be a hidden class");
387 assert(host != nullptr, "null nest host specified");
388 assert(_nest_host == nullptr, "current class has resolved nest-host");
389 assert(nest_host_error() == nullptr, "unexpected nest host resolution error exists: %s",
390 nest_host_error());
391 assert((host->_nest_host == nullptr && host->_nest_host_index == 0) ||
392 (host->_nest_host == host), "proposed host is not a valid nest-host");
393 // Can't assert this as package is not set yet:
394 // assert(is_same_class_package(host), "proposed host is in wrong package");
395
396 if (log_is_enabled(Trace, class, nestmates)) {
397 ResourceMark rm;
398 const char* msg = "";
399 // a hidden class does not expect a statically defined nest-host
400 if (_nest_host_index > 0) {
401 msg = "(the NestHost attribute in the current class is ignored)";
402 } else if (_nest_members != nullptr && _nest_members != Universe::the_empty_short_array()) {
403 msg = "(the NestMembers attribute in the current class is ignored)";
404 }
405 log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
406 this->external_name(),
407 host->external_name(),
408 msg);
409 }
410 // set dynamic nest host
411 _nest_host = host;
412 // Record dependency to keep nest host from being unloaded before this class.
413 ClassLoaderData* this_key = class_loader_data();
414 assert(this_key != nullptr, "sanity");
415 this_key->record_dependency(host);
416 }
417
418 // check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
419 // or we are k's nest_host - all of which is covered by comparing the two
420 // resolved_nest_hosts.
421 // Any exceptions (i.e. VMEs) are propagated.
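// For example, given
//   class Outer { private int x; static class Inner { int get(Outer o) { return o.x; } } }
// Inner may read Outer.x because both classes resolve Outer as their nest host.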
422 bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {
423
424 assert(this != k, "this should be handled by higher-level code");
425
426 // Per JVMS 5.4.4 we first resolve and validate the current class, then
427 // the target class k.
428
429 InstanceKlass* cur_host = nest_host(CHECK_false);
430 if (cur_host == nullptr) {
431 return false;
432 }
433
434 Klass* k_nest_host = k->nest_host(CHECK_false);
435 if (k_nest_host == nullptr) {
436 return false;
437 }
438
439 bool access = (cur_host == k_nest_host);
440
441 ResourceMark rm(THREAD);
442 log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
443 this->external_name(),
444 access ? "" : "NOT ",
445 k->external_name());
446 return access;
447 }
448
449 const char* InstanceKlass::nest_host_error() {
450 if (_nest_host_index == 0) {
451 return nullptr;
452 } else {
453 constantPoolHandle cph(Thread::current(), constants());
454 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
455 }
456 }
457
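// Allocates the InstanceKlass variant (reference, mirror, stack chunk, class
// loader, or plain InstanceKlass) that matches the class being parsed, in the
// metaspace of the given loader data. Returns null if allocation left a
// pending exception (e.g. OOM).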
458 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
459 const int size = InstanceKlass::size(parser.vtable_size(),
460 parser.itable_size(),
461 nonstatic_oop_map_size(parser.total_oop_map_count()),
462 parser.is_interface());
463
464 const Symbol* const class_name = parser.class_name();
465 assert(class_name != nullptr, "invariant");
466 ClassLoaderData* loader_data = parser.loader_data();
467 assert(loader_data != nullptr, "invariant");
468
469 InstanceKlass* ik;
470
471 // Allocation
472 if (parser.is_instance_ref_klass()) {
473 // java.lang.ref.Reference
474 ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
475 } else if (class_name == vmSymbols::java_lang_Class()) {
476 // mirror - java.lang.Class
477 ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
478 } else if (is_stack_chunk_class(class_name, loader_data)) {
479 // stack chunk
480 ik = new (loader_data, size, THREAD) InstanceStackChunkKlass(parser);
481 } else if (is_class_loader(class_name, parser)) {
482 // class loader - java.lang.ClassLoader
483 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
484 } else {
485 // normal
486 ik = new (loader_data, size, THREAD) InstanceKlass(parser);
487 }
488
489 if (ik != nullptr && UseCompressedClassPointers) {
490 assert(CompressedKlassPointers::is_encodable(ik),
           "Klass " PTR_FORMAT " needs a narrow Klass ID, but is not encodable", p2i(ik));
492 }
493
494 // Check for pending exception before adding to the loader data and incrementing
495 // class count. Can get OOM here.
496 if (HAS_PENDING_EXCEPTION) {
497 return nullptr;
498 }
499
500 return ik;
501 }
502
503
504 // copy method ordering from resource area to Metaspace
505 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
506 if (m != nullptr) {
507 // allocate a new array and copy contents (memcpy?)
508 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
509 for (int i = 0; i < m->length(); i++) {
510 _method_ordering->at_put(i, m->at(i));
511 }
512 } else {
513 _method_ordering = Universe::the_empty_int_array();
514 }
515 }
516
517 // create a new array of vtable_indices for default methods
518 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
519 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
520 assert(default_vtable_indices() == nullptr, "only create once");
521 set_default_vtable_indices(vtable_indices);
522 return vtable_indices;
523 }
524
525
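// Default constructor, used only by CDS/AOT archive handling (see the assert).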
526 InstanceKlass::InstanceKlass() {
527 assert(CDSConfig::is_dumping_static_archive() || CDSConfig::is_using_archive(), "only for CDS");
528 }
529
530 InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, ReferenceType reference_type) :
531 Klass(kind),
532 _nest_members(nullptr),
533 _nest_host(nullptr),
534 _permitted_subclasses(nullptr),
535 _record_components(nullptr),
536 _static_field_size(parser.static_field_size()),
537 _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
538 _itable_len(parser.itable_size()),
539 _nest_host_index(0),
540 _init_state(allocated),
541 _reference_type(reference_type),
542 _init_thread(nullptr),
543 _hash_offset(parser.hash_offset())
544 {
545 set_vtable_length(parser.vtable_size());
546 set_access_flags(parser.access_flags());
547 if (parser.is_hidden()) set_is_hidden();
548 set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
549 false));
550
551 assert(nullptr == _methods, "underlying memory not zeroed?");
552 assert(is_instance_klass(), "is layout incorrect?");
553 assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
554 }
555
556 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
557 Array<Method*>* methods) {
558 if (methods != nullptr && methods != Universe::the_empty_method_array() &&
559 !methods->in_aot_cache()) {
560 for (int i = 0; i < methods->length(); i++) {
561 Method* method = methods->at(i);
562 if (method == nullptr) continue; // maybe null if error processing
563 // Only want to delete methods that are not executing for RedefineClasses.
564 // The previous version will point to them so they're not totally dangling
565 assert (!method->on_stack(), "shouldn't be called with methods on stack");
566 MetadataFactory::free_metadata(loader_data, method);
567 }
568 MetadataFactory::free_array<Method*>(loader_data, methods);
569 }
570 }
571
572 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
573 const InstanceKlass* super_klass,
574 Array<InstanceKlass*>* local_interfaces,
575 Array<InstanceKlass*>* transitive_interfaces) {
576 // Only deallocate transitive interfaces if not empty, same as super class
577 // or same as local interfaces. See code in parseClassFile.
578 Array<InstanceKlass*>* ti = transitive_interfaces;
579 if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
580 // check that the interfaces don't come from super class
581 Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
582 super_klass->transitive_interfaces();
583 if (ti != sti && ti != nullptr && !ti->in_aot_cache()) {
584 MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
585 }
586 }
587
588 // local interfaces can be empty
589 if (local_interfaces != Universe::the_empty_instance_klass_array() &&
590 local_interfaces != nullptr && !local_interfaces->in_aot_cache()) {
591 MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
592 }
593 }
594
595 void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
596 Array<RecordComponent*>* record_components) {
597 if (record_components != nullptr && !record_components->in_aot_cache()) {
598 for (int i = 0; i < record_components->length(); i++) {
599 RecordComponent* record_component = record_components->at(i);
600 MetadataFactory::free_metadata(loader_data, record_component);
601 }
602 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
603 }
604 }
605
606 // This function deallocates the metadata and C heap pointers that the
607 // InstanceKlass points to.
608 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
  // Orphan the mirror first, so the GC does not treat it as still live.
610 if (java_mirror() != nullptr) {
611 java_lang_Class::set_klass(java_mirror(), nullptr);
612 }
613
614 // Also remove mirror from handles
615 loader_data->remove_handle(_java_mirror);
616
617 // Need to take this class off the class loader data list.
618 loader_data->remove_class(this);
619
620 // The array_klass for this class is created later, after error handling.
621 // For class redefinition, we keep the original class so this scratch class
622 // doesn't have an array class. Either way, assert that there is nothing
623 // to deallocate.
624 assert(array_klasses() == nullptr, "array classes shouldn't be created for this class yet");
625
626 // Release C heap allocated data that this points to, which includes
627 // reference counting symbol names.
628 // Can't release the constant pool or MethodData C heap data here because the constant
629 // pool can be deallocated separately from the InstanceKlass for default methods and
630 // redefine classes. MethodData can also be released separately.
631 release_C_heap_structures(/* release_sub_metadata */ false);
632
633 deallocate_methods(loader_data, methods());
634 set_methods(nullptr);
635
636 deallocate_record_components(loader_data, record_components());
637 set_record_components(nullptr);
638
639 if (method_ordering() != nullptr &&
640 method_ordering() != Universe::the_empty_int_array() &&
641 !method_ordering()->in_aot_cache()) {
642 MetadataFactory::free_array<int>(loader_data, method_ordering());
643 }
644 set_method_ordering(nullptr);
645
646 // default methods can be empty
647 if (default_methods() != nullptr &&
648 default_methods() != Universe::the_empty_method_array() &&
649 !default_methods()->in_aot_cache()) {
650 MetadataFactory::free_array<Method*>(loader_data, default_methods());
651 }
  // Do NOT deallocate the default Method*s themselves; they are owned by the
  // superinterfaces. Only this class's array of pointers was freed above.
653 set_default_methods(nullptr);
654
655 // default methods vtable indices can be empty
656 if (default_vtable_indices() != nullptr &&
657 !default_vtable_indices()->in_aot_cache()) {
658 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
659 }
660 set_default_vtable_indices(nullptr);
661
662
663 // This array is in Klass, but remove it with the InstanceKlass since
664 // this place would be the only caller and it can share memory with transitive
665 // interfaces.
666 if (secondary_supers() != nullptr &&
667 secondary_supers() != Universe::the_empty_klass_array() &&
668 // see comments in compute_secondary_supers about the following cast
669 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
670 !secondary_supers()->in_aot_cache()) {
671 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
672 }
673 set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);
674
675 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
676 set_transitive_interfaces(nullptr);
677 set_local_interfaces(nullptr);
678
679 if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->in_aot_cache()) {
680 MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
681 }
682 set_fieldinfo_stream(nullptr);
683
684 if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->in_aot_cache()) {
685 MetadataFactory::free_array<u1>(loader_data, fieldinfo_search_table());
686 }
687 set_fieldinfo_search_table(nullptr);
688
689 if (fields_status() != nullptr && !fields_status()->in_aot_cache()) {
690 MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
691 }
692 set_fields_status(nullptr);
693
694 // If a method from a redefined class is using this constant pool, don't
695 // delete it, yet. The new class's previous version will point to this.
696 if (constants() != nullptr) {
697 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
698 if (!constants()->in_aot_cache()) {
699 MetadataFactory::free_metadata(loader_data, constants());
700 }
701 // Delete any cached resolution errors for the constant pool
702 SystemDictionary::delete_resolution_error(constants());
703
704 set_constants(nullptr);
705 }
706
707 if (inner_classes() != nullptr &&
708 inner_classes() != Universe::the_empty_short_array() &&
709 !inner_classes()->in_aot_cache()) {
710 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
711 }
712 set_inner_classes(nullptr);
713
714 if (nest_members() != nullptr &&
715 nest_members() != Universe::the_empty_short_array() &&
716 !nest_members()->in_aot_cache()) {
717 MetadataFactory::free_array<jushort>(loader_data, nest_members());
718 }
719 set_nest_members(nullptr);
720
721 if (permitted_subclasses() != nullptr &&
722 permitted_subclasses() != Universe::the_empty_short_array() &&
723 !permitted_subclasses()->in_aot_cache()) {
724 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
725 }
726 set_permitted_subclasses(nullptr);
727
  // We should deallocate the Annotations instance if it's not in the AOT cache.
729 if (annotations() != nullptr && !annotations()->in_aot_cache()) {
730 MetadataFactory::free_metadata(loader_data, annotations());
731 }
732 set_annotations(nullptr);
733
734 SystemDictionaryShared::handle_class_unloading(this);
735
736 #if INCLUDE_CDS_JAVA_HEAP
737 if (CDSConfig::is_dumping_heap()) {
738 HeapShared::remove_scratch_objects(this);
739 }
740 #endif
741 }
742
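// JLS 8.10: a record class is implicitly final and has java.lang.Record as its
// direct superclass.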
743 bool InstanceKlass::is_record() const {
744 return _record_components != nullptr &&
745 is_final() &&
746 super() == vmClasses::Record_klass();
747 }
748
749 bool InstanceKlass::is_sealed() const {
750 return _permitted_subclasses != nullptr &&
751 _permitted_subclasses != Universe::the_empty_short_array();
752 }
753
754 // JLS 8.9: An enum class is either implicitly final and derives
755 // from java.lang.Enum, or else is implicitly sealed to its
756 // anonymous subclasses. This query detects both kinds.
757 // It does not validate the finality or
758 // sealing conditions: it merely checks for a super of Enum.
759 // This is sufficient for recognizing well-formed enums.
760 bool InstanceKlass::is_enum_subclass() const {
761 InstanceKlass* s = super();
762 return (s == vmClasses::Enum_klass() ||
763 (s != nullptr && s->super() == vmClasses::Enum_klass()));
764 }
765
766 bool InstanceKlass::should_be_initialized() const {
767 return !is_initialized();
768 }
769
770 klassItable InstanceKlass::itable() const {
771 return klassItable(const_cast<InstanceKlass*>(this));
772 }
773
774 // JVMTI spec thinks there are signers and protection domain in the
775 // instanceKlass. These accessors pretend these fields are there.
776 // The hprof specification also thinks these fields are in InstanceKlass.
777 oop InstanceKlass::protection_domain() const {
778 // return the protection_domain from the mirror
779 return java_lang_Class::protection_domain(java_mirror());
780 }
781
782 objArrayOop InstanceKlass::signers() const {
783 // return the signers from the mirror
784 return java_lang_Class::signers(java_mirror());
785 }
786
787 oop InstanceKlass::init_lock() const {
788 // return the init lock from the mirror
789 oop lock = java_lang_Class::init_lock(java_mirror());
790 // Prevent reordering with any access of initialization state
791 OrderAccess::loadload();
792 assert(lock != nullptr || !is_not_initialized(), // initialized or in_error state
793 "only fully initialized state can have a null lock");
794 return lock;
795 }
796
// Set the initialization lock to null so the object can be GC'ed. Any threads
// racing to get this lock will see a null lock and will not lock.
799 // That's okay because they all check for initialized state after getting
800 // the lock and return. For preempted vthreads we keep the oop protected
801 // in the ObjectMonitor (see ObjectMonitor::set_object_strong()).
802 void InstanceKlass::fence_and_clear_init_lock() {
803 // make sure previous stores are all done, notably the init_state.
804 OrderAccess::storestore();
805 java_lang_Class::clear_init_lock(java_mirror());
806 assert(!is_not_initialized(), "class must be initialized now");
807 }
808
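// RAII helper that flags the current thread as executing a preemptable class
// initialization call (and, in debug builds, records which klass), restoring
// the previous values when the scope exits.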
809 class PreemptableInitCall {
810 JavaThread* _thread;
811 bool _previous;
812 DEBUG_ONLY(InstanceKlass* _previous_klass;)
813 public:
814 PreemptableInitCall(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
815 _previous = thread->at_preemptable_init();
816 _thread->set_at_preemptable_init(true);
817 DEBUG_ONLY(_previous_klass = _thread->preempt_init_klass();)
818 DEBUG_ONLY(_thread->set_preempt_init_klass(ik));
819 }
820 ~PreemptableInitCall() {
821 _thread->set_at_preemptable_init(_previous);
822 DEBUG_ONLY(_thread->set_preempt_init_klass(_previous_klass));
823 }
824 };
825
826 void InstanceKlass::initialize_preemptable(TRAPS) {
827 if (this->should_be_initialized()) {
828 PreemptableInitCall pic(THREAD, this);
829 initialize_impl(THREAD);
830 } else {
831 assert(is_initialized(), "sanity check");
832 }
833 }
834
835 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refer to the procedure described in that section.
837 // Note: implementation moved to static method to expose the this pointer.
838 void InstanceKlass::initialize(TRAPS) {
839 if (this->should_be_initialized()) {
840 initialize_impl(CHECK);
841 // Note: at this point the class may be initialized
842 // OR it may be in the state of being initialized
843 // in case of recursive initialization!
844 } else {
845 assert(is_initialized(), "sanity check");
846 }
847 }
848
849 #ifdef ASSERT
850 void InstanceKlass::assert_no_clinit_will_run_for_aot_initialized_class() const {
851 assert(has_aot_initialized_mirror(), "must be");
852
853 InstanceKlass* s = super();
854 if (s != nullptr) {
855 DEBUG_ONLY(ResourceMark rm);
856 assert(s->is_initialized(), "super class %s of aot-inited class %s must have been initialized",
857 s->external_name(), external_name());
858 s->assert_no_clinit_will_run_for_aot_initialized_class();
859 }
860
861 Array<InstanceKlass*>* interfaces = local_interfaces();
862 int len = interfaces->length();
863 for (int i = 0; i < len; i++) {
864 InstanceKlass* intf = interfaces->at(i);
865 if (!intf->is_initialized()) {
866 ResourceMark rm;
867 // Note: an interface needs to be marked as is_initialized() only if
868 // - it has a <clinit>
869 // - it has declared a default method.
870 assert(!intf->interface_needs_clinit_execution_as_super(/*also_check_supers*/false),
871 "uninitialized super interface %s of aot-inited class %s must not have <clinit>",
872 intf->external_name(), external_name());
873 }
874 }
875 }
876 #endif
877
878 #if INCLUDE_CDS
879 void InstanceKlass::initialize_with_aot_initialized_mirror(TRAPS) {
880 assert(has_aot_initialized_mirror(), "must be");
881 assert(CDSConfig::is_loading_heap(), "must be");
882 assert(CDSConfig::is_using_aot_linked_classes(), "must be");
883 assert_no_clinit_will_run_for_aot_initialized_class();
884
885 if (is_initialized()) {
886 return;
887 }
888
889 if (is_runtime_setup_required()) {
890 // Need to take the slow path, which will call the runtimeSetup() function instead
891 // of <clinit>
892 initialize(CHECK);
893 return;
894 }
895 if (log_is_enabled(Info, aot, init)) {
896 ResourceMark rm;
897 log_info(aot, init)("%s (aot-inited)", external_name());
898 }
899
900 link_class(CHECK);
901
902 #ifdef ASSERT
903 {
904 Handle h_init_lock(THREAD, init_lock());
905 ObjectLocker ol(h_init_lock, THREAD);
906 assert(!is_initialized(), "sanity");
907 assert(!is_being_initialized(), "sanity");
908 assert(!is_in_error_state(), "sanity");
909 }
910 #endif
911
912 set_init_thread(THREAD);
913 set_initialization_state_and_notify(fully_initialized, CHECK);
914 }
915 #endif
916
917 bool InstanceKlass::verify_code(TRAPS) {
918 // 1) Verify the bytecodes
919 return Verifier::verify(this, should_verify_class(), THREAD);
920 }
921
922 void InstanceKlass::link_class(TRAPS) {
923 assert(is_loaded(), "must be loaded");
924 if (!is_linked()) {
925 link_class_impl(CHECK);
926 }
927 }
928
929 // Called to verify that a class can link during initialization, without
930 // throwing a VerifyError.
931 bool InstanceKlass::link_class_or_fail(TRAPS) {
932 assert(is_loaded(), "must be loaded");
933 if (!is_linked()) {
934 link_class_impl(CHECK_false);
935 }
936 return is_linked();
937 }
938
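// Links this class: supertypes and local interfaces are linked first, then the
// bytecodes are verified and rewritten, methods are linked, and the
// vtable/itable are built (with loader constraint checking) before the 'linked'
// state is published, deoptimizing dependent code if needed. Returns false on
// failure, normally with a pending exception.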
939 bool InstanceKlass::link_class_impl(TRAPS) {
940 if (CDSConfig::is_dumping_static_archive() && SystemDictionaryShared::has_class_failed_verification(this)) {
941 // This is for CDS static dump only -- we use the in_error_state to indicate that
942 // the class has failed verification. Throwing the NoClassDefFoundError here is just
943 // a convenient way to stop repeat attempts to verify the same (bad) class.
944 //
945 // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
946 // if we are executing Java code. This is not a problem for CDS dumping phase since
947 // it doesn't execute any Java code.
948 ResourceMark rm(THREAD);
949 // Names are all known to be < 64k so we know this formatted message is not excessively large.
950 Exceptions::fthrow(THREAD_AND_LOCATION,
951 vmSymbols::java_lang_NoClassDefFoundError(),
952 "Class %s, or one of its supertypes, failed class initialization",
953 external_name());
954 return false;
955 }
956 // return if already verified
957 if (is_linked()) {
958 return true;
959 }
960
961 // Timing
962 // timer handles recursion
963 JavaThread* jt = THREAD;
964
965 // link super class before linking this class
966 InstanceKlass* super_klass = super();
967 if (super_klass != nullptr) {
968 if (super_klass->is_interface()) { // check if super class is an interface
969 ResourceMark rm(THREAD);
970 // Names are all known to be < 64k so we know this formatted message is not excessively large.
971 Exceptions::fthrow(
972 THREAD_AND_LOCATION,
973 vmSymbols::java_lang_IncompatibleClassChangeError(),
974 "class %s has interface %s as super class",
975 external_name(),
976 super_klass->external_name()
977 );
978 return false;
979 }
980
981 super_klass->link_class_impl(CHECK_false);
982 }
983
984 // link all interfaces implemented by this class before linking this class
985 Array<InstanceKlass*>* interfaces = local_interfaces();
986 int num_interfaces = interfaces->length();
987 for (int index = 0; index < num_interfaces; index++) {
988 InstanceKlass* interk = interfaces->at(index);
989 interk->link_class_impl(CHECK_false);
990 }
991
992 // in case the class is linked in the process of linking its superclasses
993 if (is_linked()) {
994 return true;
995 }
996
997 // trace only the link time for this klass that includes
998 // the verification time
999 PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
1000 ClassLoader::perf_class_link_selftime(),
1001 ClassLoader::perf_classes_linked(),
1002 jt->get_thread_stat()->perf_recursion_counts_addr(),
1003 jt->get_thread_stat()->perf_timers_addr(),
1004 PerfClassTraceTime::CLASS_LINK);
1005
1006 // verification & rewriting
1007 {
1008 HandleMark hm(THREAD);
1009 Handle h_init_lock(THREAD, init_lock());
1010 ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE_false);
1011 // Don't allow preemption if we link/initialize classes below,
1012 // since that would release this monitor while we are in the
1013 // middle of linking this class.
1014 NoPreemptMark npm(THREAD);
1015
1016 // rewritten will have been set if loader constraint error found
1017 // on an earlier link attempt
1018 // don't verify or rewrite if already rewritten
1019 //
1020
1021 if (!is_linked()) {
1022 if (!is_rewritten()) {
1023 if (in_aot_cache()) {
1024 assert(!verified_at_dump_time(), "must be");
1025 }
1026 {
1027 bool verify_ok = verify_code(THREAD);
1028 if (!verify_ok) {
1029 return false;
1030 }
1031 }
1032
1033 // Just in case a side-effect of verify linked this class already
1034 // (which can sometimes happen since the verifier loads classes
1035 // using custom class loaders, which are free to initialize things)
1036 if (is_linked()) {
1037 return true;
1038 }
1039
1040 // also sets rewritten
1041 rewrite_class(CHECK_false);
1042 } else if (in_aot_cache()) {
1043 SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
1044 }
1045
1046 // relocate jsrs and link methods after they are all rewritten
1047 link_methods(CHECK_false);
1048
1049 // Initialize the vtable and interface table after
1050 // methods have been rewritten since rewrite may
1051 // fabricate new Method*s.
1052 // also does loader constraint checking
1053 //
1054 // initialize_vtable and initialize_itable need to be rerun
1055 // for a shared class if
1056 // 1) the class is loaded by custom class loader or
1057 // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
1058 // 3) the class was not verified during dump time
1059 bool need_init_table = true;
1060 if (in_aot_cache() && verified_at_dump_time() &&
1061 SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
1062 need_init_table = false;
1063 }
1064 if (need_init_table) {
1065 vtable().initialize_vtable_and_check_constraints(CHECK_false);
1066 itable().initialize_itable_and_check_constraints(CHECK_false);
1067 }
1068 #ifdef ASSERT
1069 vtable().verify(tty, true);
1070 // In case itable verification is ever added.
1071 // itable().verify(tty, true);
1072 #endif
1073 if (Universe::is_fully_initialized()) {
1074 DeoptimizationScope deopt_scope;
1075 {
1076 // Now mark all code that assumes the class is not linked.
1077 // Set state under the Compile_lock also.
1078 MutexLocker ml(THREAD, Compile_lock);
1079
1080 set_init_state(linked);
1081 CodeCache::mark_dependents_on(&deopt_scope, this);
1082 }
1083 // Perform the deopt handshake outside Compile_lock.
1084 deopt_scope.deoptimize_marked();
1085 } else {
1086 set_init_state(linked);
1087 }
1088 if (JvmtiExport::should_post_class_prepare()) {
1089 JvmtiExport::post_class_prepare(THREAD, this);
1090 }
1091 }
1092 }
1093 return true;
1094 }
1095
1096 // Rewrite the byte codes of all of the methods of a class.
1097 // The rewriter must be called exactly once. Rewriting must happen after
1098 // verification but before the first method of the class is executed.
1099 void InstanceKlass::rewrite_class(TRAPS) {
1100 assert(is_loaded(), "must be loaded");
1101 if (is_rewritten()) {
1102 assert(in_aot_cache(), "rewriting an unshared class?");
1103 return;
1104 }
1105 Rewriter::rewrite(this, CHECK);
1106 set_rewritten();
1107 }
1108
1109 // Now relocate and link method entry points after class is rewritten.
1110 // This is outside is_rewritten flag. In case of an exception, it can be
1111 // executed more than once.
1112 void InstanceKlass::link_methods(TRAPS) {
1113 PerfTraceTime timer(ClassLoader::perf_ik_link_methods_time());
1114
1115 int len = methods()->length();
1116 for (int i = len-1; i >= 0; i--) {
1117 methodHandle m(THREAD, methods()->at(i));
1118
    // Set up method entry points for compiler and interpreter.
1120 m->link_method(m, CHECK);
1121 }
1122 }
1123
1124 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1125 void InstanceKlass::initialize_super_interfaces(TRAPS) {
1126 assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1127 for (int i = 0; i < local_interfaces()->length(); ++i) {
1128 InstanceKlass* ik = local_interfaces()->at(i);
1129
    // Initialization is a depth-first search, i.e. we start with the top of the
    // inheritance tree. has_nonstatic_concrete_methods drives the search of
    // superinterfaces, since a set flag means the interface itself or something
    // in its superinterface hierarchy declares non-static, concrete methods.
1133 if (ik->has_nonstatic_concrete_methods()) {
1134 ik->initialize_super_interfaces(CHECK);
1135 }
1136
1137 // Only initialize() interfaces that "declare" concrete methods.
1138 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1139 ik->initialize(CHECK);
1140 }
1141 }
1142 }
1143
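// Table mapping a class whose initialization failed to the saved exception
// (held as a strong root), used as the cause of the NoClassDefFoundError thrown
// for subsequent initialization attempts.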
1144 using InitializationErrorTable = HashTable<const InstanceKlass*, OopHandle, 107, AnyObj::C_HEAP, mtClass>;
1145 static InitializationErrorTable* _initialization_error_table;
1146
1147 void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
1148 // Create the same exception with a message indicating the thread name,
1149 // and the StackTraceElements.
1150 Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
1151 ResourceMark rm(current);
1152 if (init_error.is_null()) {
1153 log_trace(class, init)("Unable to create the desired initialization error for class %s", external_name());
1154
1155 // We failed to create the new exception, most likely due to either out-of-memory or
1156 // a stackoverflow error. If the original exception was either of those then we save
1157 // the shared, pre-allocated, stackless, instance of that exception.
1158 if (exception->klass() == vmClasses::StackOverflowError_klass()) {
1159 log_debug(class, init)("Using shared StackOverflowError as initialization error for class %s", external_name());
1160 init_error = Handle(current, Universe::class_init_stack_overflow_error());
1161 } else if (exception->klass() == vmClasses::OutOfMemoryError_klass()) {
1162 log_debug(class, init)("Using shared OutOfMemoryError as initialization error for class %s", external_name());
1163 init_error = Handle(current, Universe::class_init_out_of_memory_error());
1164 } else {
1165 return;
1166 }
1167 }
1168
1169 MutexLocker ml(current, ClassInitError_lock);
1170 OopHandle elem = OopHandle(Universe::vm_global(), init_error());
1171 bool created;
1172 if (_initialization_error_table == nullptr) {
1173 _initialization_error_table = new (mtClass) InitializationErrorTable();
1174 }
1175 _initialization_error_table->put_if_absent(this, elem, &created);
1176 assert(created, "Initialization is single threaded");
1177 log_trace(class, init)("Initialization error added for class %s", external_name());
1178 }
1179
1180 oop InstanceKlass::get_initialization_error(JavaThread* current) {
1181 MutexLocker ml(current, ClassInitError_lock);
1182 if (_initialization_error_table == nullptr) {
1183 return nullptr;
1184 }
1185 OopHandle* h = _initialization_error_table->get(this);
1186 return (h != nullptr) ? h->resolve() : nullptr;
1187 }
1188
1189 // Need to remove entries for unloaded classes.
1190 void InstanceKlass::clean_initialization_error_table() {
1191 struct InitErrorTableCleaner {
1192 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1193 if (!ik->is_loader_alive()) {
1194 h.release(Universe::vm_global());
1195 return true;
1196 } else {
1197 return false;
1198 }
1199 }
1200 };
1201
1202 assert_locked_or_safepoint(ClassInitError_lock);
1203 InitErrorTableCleaner cleaner;
1204 if (_initialization_error_table != nullptr) {
1205 _initialization_error_table->unlink(&cleaner);
1206 }
1207 }
1208
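// RAII helper that records on the current thread which class it is waiting to
// see initialized (cleared again on scope exit); used while waiting in Step 2
// of initialize_impl below.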
1209 class ThreadWaitingForClassInit : public StackObj {
1210 JavaThread* _thread;
1211 public:
1212 ThreadWaitingForClassInit(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
1213 _thread->set_class_to_be_initialized(ik);
1214 }
1215 ~ThreadWaitingForClassInit() {
1216 _thread->set_class_to_be_initialized(nullptr);
1217 }
1218 };
1219
1220 void InstanceKlass::initialize_impl(TRAPS) {
1221 HandleMark hm(THREAD);
1222
1223 // Make sure klass is linked (verified) before initialization
1224 // A class could already be verified, since it has been reflected upon.
1225 link_class(CHECK);
1226
1227 DTRACE_CLASSINIT_PROBE(required, -1);
1228
1229 bool wait = false;
1230
1231 JavaThread* jt = THREAD;
1232
1233 bool debug_logging_enabled = log_is_enabled(Debug, class, init);
1234
1235 // refer to the JVM book page 47 for description of steps
1236 // Step 1
1237 {
1238 Handle h_init_lock(THREAD, init_lock());
1239 ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE);
1240
1241 // Step 2
    // If we were to use an interruptible wait() here instead of
    // wait_uninterruptibly() then we might end up throwing IE from link/symbol
    // resolution sites that aren't expected to throw. This would wreak havoc. See 6320309.
1245 while (is_being_initialized() && !is_reentrant_initialization(jt)) {
1246 if (debug_logging_enabled) {
1247 ResourceMark rm(jt);
1248 log_debug(class, init)("Thread \"%s\" waiting for initialization of %s by thread \"%s\"",
1249 jt->name(), external_name(), init_thread_name());
1250 }
1251 wait = true;
1252 ThreadWaitingForClassInit twcl(THREAD, this);
1253 ol.wait_uninterruptibly(CHECK_PREEMPTABLE);
1254 }
1255
1256 // Step 3
1257 if (is_being_initialized() && is_reentrant_initialization(jt)) {
1258 if (debug_logging_enabled) {
1259 ResourceMark rm(jt);
1260 log_debug(class, init)("Thread \"%s\" recursively initializing %s",
1261 jt->name(), external_name());
1262 }
1263 DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
1264 return;
1265 }
1266
1267 // Step 4
1268 if (is_initialized()) {
1269 if (debug_logging_enabled) {
1270 ResourceMark rm(jt);
1271 log_debug(class, init)("Thread \"%s\" found %s already initialized",
1272 jt->name(), external_name());
1273 }
1274 DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
1275 return;
1276 }
1277
1278 // Step 5
1279 if (is_in_error_state()) {
1280 if (debug_logging_enabled) {
1281 ResourceMark rm(jt);
1282 log_debug(class, init)("Thread \"%s\" found %s is in error state",
1283 jt->name(), external_name());
1284 }
1285
1286 DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
1287 ResourceMark rm(THREAD);
1288 Handle cause(THREAD, get_initialization_error(THREAD));
1289
1290 stringStream ss;
1291 ss.print("Could not initialize class %s", external_name());
1292 if (cause.is_null()) {
1293 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
1294 } else {
1295 THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
1296 ss.as_string(), cause);
1297 }
1298 } else {
1299
1300 // Step 6
1301 set_init_state(being_initialized);
1302 set_init_thread(jt);
1303 if (debug_logging_enabled) {
1304 ResourceMark rm(jt);
1305 log_debug(class, init)("Thread \"%s\" is initializing %s",
1306 jt->name(), external_name());
1307 }
1308 }
1309 }
1310
  // Block preemption once we are the initializer thread. Unmounting now would
  // complicate the reentrant case, since the recorded initializer identity
  // (_init_thread) is the platform thread.
1313 NoPreemptMark npm(THREAD);
1314
1315 // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
1318 if (!is_interface()) {
1319 Klass* super_klass = super();
1320 if (super_klass != nullptr && super_klass->should_be_initialized()) {
1321 super_klass->initialize(THREAD);
1322 }
1323 // If C implements any interface that declares a non-static, concrete method,
1324 // the initialization of C triggers initialization of its super interfaces.
1325 // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
1326 // having a superinterface that declares, non-static, concrete methods
1327 if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
1328 initialize_super_interfaces(THREAD);
1329 }
1330
1331 // If any exceptions, complete abruptly, throwing the same exception as above.
1332 if (HAS_PENDING_EXCEPTION) {
1333 Handle e(THREAD, PENDING_EXCEPTION);
1334 CLEAR_PENDING_EXCEPTION;
1335 {
1336 EXCEPTION_MARK;
1337 add_initialization_error(THREAD, e);
1338 // Locks object, set state, and notify all waiting threads
1339 set_initialization_state_and_notify(initialization_error, THREAD);
1340 CLEAR_PENDING_EXCEPTION;
1341 }
1342 DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1343 THROW_OOP(e());
1344 }
1345 }
1346
1347
1348 // Step 8
1349 {
1350 DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1351 if (class_initializer() != nullptr) {
1352 // Timer includes any side effects of class initialization (resolution,
1353 // etc), but not recursive entry into call_class_initializer().
1354 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1355 ClassLoader::perf_class_init_selftime(),
1356 ClassLoader::perf_classes_inited(),
1357 jt->get_thread_stat()->perf_recursion_counts_addr(),
1358 jt->get_thread_stat()->perf_timers_addr(),
1359 PerfClassTraceTime::CLASS_CLINIT);
1360 call_class_initializer(THREAD);
1361 } else {
1362 // The elapsed time is so small it's not worth counting.
1363 if (UsePerfData) {
1364 ClassLoader::perf_classes_inited()->inc();
1365 }
1366 call_class_initializer(THREAD);
1367 }
1368 }
1369
1370 // Step 9
1371 if (!HAS_PENDING_EXCEPTION) {
1372 set_initialization_state_and_notify(fully_initialized, CHECK);
1373 DEBUG_ONLY(vtable().verify(tty, true);)
1374 CompilationPolicy::replay_training_at_init(this, THREAD);
1375 }
1376 else {
1377 // Step 10 and 11
1378 Handle e(THREAD, PENDING_EXCEPTION);
1379 CLEAR_PENDING_EXCEPTION;
1380 // JVMTI has already reported the pending exception
1381 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1382 JvmtiExport::clear_detected_exception(jt);
1383 {
1384 EXCEPTION_MARK;
1385 add_initialization_error(THREAD, e);
1386 set_initialization_state_and_notify(initialization_error, THREAD);
1387 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
1388 // JVMTI has already reported the pending exception
1389 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1390 JvmtiExport::clear_detected_exception(jt);
1391 }
1392 DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1393 if (e->is_a(vmClasses::Error_klass())) {
1394 THROW_OOP(e());
1395 } else {
1396 JavaCallArguments args(e);
1397 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
1398 vmSymbols::throwable_void_signature(),
1399 &args);
1400 }
1401 }
1402 DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1403 }
1404
1405
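// Publishes a terminal initialization state under the init lock (when it still
// exists) and notifies all waiting threads; the lock is then cleared so the
// lock object can be collected.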
1406 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
1407 Handle h_init_lock(THREAD, init_lock());
1408 if (h_init_lock() != nullptr) {
1409 ObjectLocker ol(h_init_lock, THREAD);
1410 set_init_thread(nullptr); // reset _init_thread before changing _init_state
1411 set_init_state(state);
1412 fence_and_clear_init_lock();
1413 ol.notify_all(CHECK);
1414 } else {
1415 assert(h_init_lock() != nullptr, "The initialization state should never be set twice");
1416 set_init_thread(nullptr); // reset _init_thread before changing _init_state
1417 set_init_state(state);
1418 }
1419 }
1420
1421 // Update hierarchy. This is done before the new klass has been added to the SystemDictionary. The Compile_lock
1422 // is grabbed, to ensure that the compiler is not using the class hierarchy.
1423 void InstanceKlass::add_to_hierarchy(JavaThread* current) {
1424 assert(!SafepointSynchronize::is_at_safepoint(), "must NOT be at safepoint");
1425
1426 DeoptimizationScope deopt_scope;
1427 {
1428 MutexLocker ml(current, Compile_lock);
1429
1430 set_init_state(InstanceKlass::loaded);
1431 // make sure init_state store is already done.
1432 // The compiler reads the hierarchy outside of the Compile_lock.
1433 // Access ordering is used to add to hierarchy.
1434
1435 // Link into hierarchy.
1436 append_to_sibling_list(); // add to superklass/sibling list
1437 process_interfaces(); // handle all "implements" declarations
1438
1439 // Now mark all code that depended on old class hierarchy.
1440 // Note: must be done *after* linking k into the hierarchy (was bug 12/9/97)
1441 if (Universe::is_fully_initialized()) {
1442 CodeCache::mark_dependents_on(&deopt_scope, this);
1443 }
1444 }
1445 // Perform the deopt handshake outside Compile_lock.
1446 deopt_scope.deoptimize_marked();
1447 }
1448
1449
1450 InstanceKlass* InstanceKlass::implementor() const {
1451 InstanceKlass* volatile* ik = adr_implementor();
1452 if (ik == nullptr) {
1453 return nullptr;
1454 } else {
1455 // This load races with inserts, and therefore needs acquire.
1456 InstanceKlass* ikls = AtomicAccess::load_acquire(ik);
1457 if (ikls != nullptr && !ikls->is_loader_alive()) {
1458 return nullptr; // don't return unloaded class
1459 } else {
1460 return ikls;
1461 }
1462 }
1463 }
1464
1465
1466 void InstanceKlass::set_implementor(InstanceKlass* ik) {
1467 assert_locked_or_safepoint(Compile_lock);
1468 assert(is_interface(), "not interface");
1469 InstanceKlass* volatile* addr = adr_implementor();
1470 assert(addr != nullptr, "null addr");
1471 if (addr != nullptr) {
1472 AtomicAccess::release_store(addr, ik);
1473 }
1474 }
1475
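// Return the number of implementors of this interface, saturated at 2: the
// _implementor field only distinguishes "none", "exactly one", and "more than one"
// (the latter encoded as the interface itself).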
1476 int InstanceKlass::nof_implementors() const {
1477 InstanceKlass* ik = implementor();
1478 if (ik == nullptr) {
1479 return 0;
1480 } else if (ik != this) {
1481 return 1;
1482 } else {
1483 return 2;
1484 }
1485 }
1486
1487 // The embedded _implementor field can only record one implementor.
1488 // When there is more than one implementor, the _implementor field
1489 // is set to the interface Klass* itself. The possible values for
1490 // the _implementor field are:
1491 // null - no implementor
1492 // implementor Klass* - one implementor
1493 // self - more than one implementor
1494 //
1495 // The _implementor field only exists for interfaces.
1496 void InstanceKlass::add_implementor(InstanceKlass* ik) {
1497 if (Universe::is_fully_initialized()) {
1498 assert_lock_strong(Compile_lock);
1499 }
1500 assert(is_interface(), "not interface");
1501 // Filter out my subinterfaces.
1502 // (Note: Interfaces are never on the subklass list.)
1503 if (ik->is_interface()) return;
1504
1505 // Filter out subclasses whose supers already implement me.
1506 // (Note: CHA must walk subclasses of direct implementors
1507 // in order to locate indirect implementors.)
1508 InstanceKlass* super_ik = ik->super();
1509 if (super_ik != nullptr && super_ik->implements_interface(this))
1510 // We only need to check one immediate superclass, since the
1511 // implements_interface query looks at transitive_interfaces.
1512 // Any supers of the super have the same (or fewer) transitive_interfaces.
1513 return;
1514
1515 InstanceKlass* iklass = implementor();
1516 if (iklass == nullptr) {
1517 set_implementor(ik);
1518 } else if (iklass != this && iklass != ik) {
1519 // There is already a different implementor. Use the interface itself as an
1520 // indicator of more than one implementor.
1521 set_implementor(this);
1522 }
1523
1524 // The implementor also implements the transitive_interfaces
1525 for (int index = 0; index < local_interfaces()->length(); index++) {
1526 local_interfaces()->at(index)->add_implementor(ik);
1527 }
1528 }
1529
1530 void InstanceKlass::init_implementor() {
1531 if (is_interface()) {
1532 set_implementor(nullptr);
1533 }
1534 }
1535
1536
1537 void InstanceKlass::process_interfaces() {
1538 // link this class into the implementors list of every interface it implements
1539 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1540 assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1541 InstanceKlass* interf = local_interfaces()->at(i);
1542 assert(interf->is_interface(), "expected interface");
1543 interf->add_implementor(this);
1544 }
1545 }
1546
1547 bool InstanceKlass::can_be_primary_super_slow() const {
1548 if (is_interface())
1549 return false;
1550 else
1551 return Klass::can_be_primary_super_slow();
1552 }
1553
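// Compute the secondary supers (the transitive interfaces). Returns null when the
// secondary supers array has been installed directly; otherwise returns a growable
// array of interfaces for the caller to complete with the extra slots and install.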
1554 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1555 Array<InstanceKlass*>* transitive_interfaces) {
1556 // The secondaries are the implemented interfaces.
1557 // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1558 // (but it's safe to do here because we won't write into _secondary_supers from this point on).
1559 Array<Klass*>* interfaces = (Array<Klass*>*)(address)transitive_interfaces;
1560 int num_secondaries = num_extra_slots + interfaces->length();
1561 if (num_secondaries == 0) {
1562 // Must share this for correct bootstrapping!
1563 set_secondary_supers(Universe::the_empty_klass_array(), Universe::the_empty_klass_bitmap());
1564 return nullptr;
1565 } else if (num_extra_slots == 0 && interfaces->length() <= 1) {
1566 // We will reuse the transitive interfaces list if we're certain
1567 // it's in hash order.
1568 uintx bitmap = compute_secondary_supers_bitmap(interfaces);
1569 set_secondary_supers(interfaces, bitmap);
1570 return nullptr;
1571 }
1572 // Copy transitive interfaces to a temporary growable array to be constructed
1573 // into the secondary super list with extra slots.
1574 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1575 for (int i = 0; i < interfaces->length(); i++) {
1576 secondaries->push(interfaces->at(i));
1577 }
1578 return secondaries;
1579 }
1580
1581 bool InstanceKlass::implements_interface(Klass* k) const {
1582 if (this == k) return true;
1583 assert(k->is_interface(), "should be an interface class");
1584 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1585 if (transitive_interfaces()->at(i) == k) {
1586 return true;
1587 }
1588 }
1589 return false;
1590 }
1591
1592 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1593 // Verify direct super interface
1594 if (this == k) return true;
1595 assert(k->is_interface(), "should be an interface class");
1596 for (int i = 0; i < local_interfaces()->length(); i++) {
1597 if (local_interfaces()->at(i) == k) {
1598 return true;
1599 }
1600 }
1601 return false;
1602 }
1603
1604 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
1605 if (TraceFinalizerRegistration) {
1606 tty->print("Registered ");
1607 i->print_value_on(tty);
1608 tty->print_cr(" (" PTR_FORMAT ") as finalizable", p2i(i));
1609 }
1610 instanceHandle h_i(THREAD, i);
1611 // Pass the handle as argument; JavaCalls::call expects oops to be passed as jobjects
1612 JavaValue result(T_VOID);
1613 JavaCallArguments args(h_i);
1614 methodHandle mh(THREAD, Universe::finalizer_register_method());
1615 JavaCalls::call(&result, mh, &args, CHECK_NULL);
1616 MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)
1617 return h_i();
1618 }
1619
1620 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1621 assert(!is_abstract() && !is_interface(), "Should not create this object");
1622 size_t size = size_helper(); // Query before forming handle.
1623 return (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1624 }
1625
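// Allocate an instance of the class represented by the given java.lang.Class mirror.
// A primitive mirror has no Klass, so it results in InstantiationException; otherwise
// the class is checked for instantiability and initialized before allocation.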
1626 instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
1627 Klass* k = java_lang_Class::as_Klass(java_class);
1628 if (k == nullptr) {
1629 ResourceMark rm(THREAD);
1630 THROW_(vmSymbols::java_lang_InstantiationException(), nullptr);
1631 }
1632 InstanceKlass* ik = cast(k);
1633 ik->check_valid_for_instantiation(false, CHECK_NULL);
1634 ik->initialize(CHECK_NULL);
1635 return ik->allocate_instance(THREAD);
1636 }
1637
1638 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1639 return instanceHandle(THREAD, allocate_instance(THREAD));
1640 }
1641
1642 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1643 if (is_interface() || is_abstract()) {
1644 ResourceMark rm(THREAD);
1645 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1646 : vmSymbols::java_lang_InstantiationException(), external_name());
1647 }
1648 if (this == vmClasses::Class_klass()) {
1649 ResourceMark rm(THREAD);
1650 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1651 : vmSymbols::java_lang_IllegalAccessException(), external_name());
1652 }
1653 }
1654
1655 ArrayKlass* InstanceKlass::array_klass(int n, TRAPS) {
1656 // Need load-acquire for lock-free read
1657 if (array_klasses_acquire() == nullptr) {
1658
1659 // Recursively lock array allocation
1660 RecursiveLocker rl(MultiArray_lock, THREAD);
1661
1662 // Check if another thread created the array klass while we were waiting for the lock.
1663 if (array_klasses() == nullptr) {
1664 ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
1665 // use 'release' to pair with lock-free load
1666 release_set_array_klasses(k);
1667 }
1668 }
1669
1670 // array_klasses() will always be set at this point
1671 ObjArrayKlass* ak = array_klasses();
1672 assert(ak != nullptr, "should be set");
1673 return ak->array_klass(n, THREAD);
1674 }
1675
1676 ArrayKlass* InstanceKlass::array_klass_or_null(int n) {
1677 // Need load-acquire for lock-free read
1678 ObjArrayKlass* oak = array_klasses_acquire();
1679 if (oak == nullptr) {
1680 return nullptr;
1681 } else {
1682 return oak->array_klass_or_null(n);
1683 }
1684 }
1685
1686 ArrayKlass* InstanceKlass::array_klass(TRAPS) {
1687 return array_klass(1, THREAD);
1688 }
1689
1690 ArrayKlass* InstanceKlass::array_klass_or_null() {
1691 return array_klass_or_null(1);
1692 }
1693
1694 static int call_class_initializer_counter = 0; // for debugging
1695
1696 Method* InstanceKlass::class_initializer() const {
1697 Method* clinit = find_method(
1698 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1699 if (clinit != nullptr && clinit->has_valid_initializer_flags()) {
1700 return clinit;
1701 }
1702 return nullptr;
1703 }
1704
1705 void InstanceKlass::call_class_initializer(TRAPS) {
1706 if (ReplayCompiles &&
1707 (ReplaySuppressInitializers == 1 ||
1708 (ReplaySuppressInitializers >= 2 && class_loader() != nullptr))) {
1709 // Hide the existence of the initializer for the purpose of replaying the compile
1710 return;
1711 }
1712
1713 #if INCLUDE_CDS
1714 // This is needed to ensure the consistency of the archived heap objects.
1715 if (has_aot_initialized_mirror() && CDSConfig::is_loading_heap()) {
1716 AOTClassInitializer::call_runtime_setup(THREAD, this);
1717 return;
1718 } else if (has_archived_enum_objs()) {
1719 assert(in_aot_cache(), "must be");
1720 bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK);
1721 if (initialized) {
1722 return;
1723 }
1724 }
1725 #endif
1726
1727 methodHandle h_method(THREAD, class_initializer());
1728 assert(!is_initialized(), "we cannot initialize twice");
1729 LogTarget(Info, class, init) lt;
1730 if (lt.is_enabled()) {
1731 ResourceMark rm(THREAD);
1732 LogStream ls(lt);
1733 ls.print("%d Initializing ", call_class_initializer_counter++);
1734 name()->print_value_on(&ls);
1735 ls.print_cr("%s (" PTR_FORMAT ") by thread \"%s\"",
1736 h_method() == nullptr ? "(no method)" : "", p2i(this),
1737 THREAD->name());
1738 }
1739 if (h_method() != nullptr) {
1740 ThreadInClassInitializer ticl(THREAD, this); // Track class being initialized
1741 JavaCallArguments args; // No arguments
1742 JavaValue result(T_VOID);
1743 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1744 }
1745 }
1746
1747 // If a class that implements this interface is initialized, is the JVM required
1748 // to first execute a <clinit> method declared in this interface,
1749 // or (if also_check_supers==true) any of the super types of this interface?
1750 //
1751 // JVMS 5.5. Initialization, step 7: Next, if C is a class rather than
1752 // an interface, then let SC be its superclass and let SI1, ..., SIn
1753 // be all superinterfaces of C (whether direct or indirect) that
1754 // declare at least one non-abstract, non-static method.
1755 //
1756 // So when an interface is initialized, it does not look at its
1757 // supers. But a proper class will ensure that all of its supers have
1758 // run their <clinit> methods, except that it disregards interfaces
1759 // that lack a non-static concrete method (i.e., a default method).
1760 // Therefore, you should probably call this method only when the
1761 // current class is a super of some proper class, not an interface.
1762 bool InstanceKlass::interface_needs_clinit_execution_as_super(bool also_check_supers) const {
1763 assert(is_interface(), "must be");
1764
1765 if (!has_nonstatic_concrete_methods()) {
1766 // quick check: no nonstatic concrete methods are declared by this or any super interfaces
1767 return false;
1768 }
1769
1770 // JVMS 5.5. Initialization
1771 // ...A class or interface C may be initialized as a result of:
1772 // If C is an interface that declares a non-abstract, non-static method,
1773 // the initialization of a class that implements C directly or indirectly.
1774 if (declares_nonstatic_concrete_methods() && class_initializer() != nullptr) {
1775 return true;
1776 }
1777 if (also_check_supers) {
1778 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
1779 for (int i = 0; i < all_ifs->length(); ++i) {
1780 InstanceKlass* super_intf = all_ifs->at(i);
1781 if (super_intf->declares_nonstatic_concrete_methods() && super_intf->class_initializer() != nullptr) {
1782 return true;
1783 }
1784 }
1785 }
1786 return false;
1787 }
1788
1789 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1790 InterpreterOopMap* entry_for) {
1791 // Lazily create the _oop_map_cache at first request.
1792 // Load_acquire is needed to safely get instance published with CAS by another thread.
1793 OopMapCache* oop_map_cache = AtomicAccess::load_acquire(&_oop_map_cache);
1794 if (oop_map_cache == nullptr) {
1795 // Try to install new instance atomically.
1796 oop_map_cache = new OopMapCache();
1797 OopMapCache* other = AtomicAccess::cmpxchg(&_oop_map_cache, (OopMapCache*)nullptr, oop_map_cache);
1798 if (other != nullptr) {
1799 // Someone else managed to install before us, ditch local copy and use the existing one.
1800 delete oop_map_cache;
1801 oop_map_cache = other;
1802 }
1803 }
1804 // _oop_map_cache is constant after init; lookup below does its own locking.
1805 oop_map_cache->lookup(method, bci, entry_for);
1806 }
1807
1808 bool InstanceKlass::contains_field_offset(int offset) {
1809 fieldDescriptor fd;
1810 return find_field_from_offset(offset, false, &fd);
1811 }
1812
1813 FieldInfo InstanceKlass::field(int index) const {
1814 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
1815 if (fs.index() == index) {
1816 return fs.to_FieldInfo();
1817 }
1818 }
1819 fatal("Field not found");
1820 return FieldInfo();
1821 }
1822
1823 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1824 JavaFieldStream fs(this);
1825 if (fs.lookup(name, sig)) {
1826 assert(fs.name() == name, "name must match");
1827 assert(fs.signature() == sig, "signature must match");
1828 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
1829 return true;
1830 }
1831 return false;
1832 }
1833
1834
1835 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1836 const int n = local_interfaces()->length();
1837 for (int i = 0; i < n; i++) {
1838 InstanceKlass* intf1 = local_interfaces()->at(i);
1839 assert(intf1->is_interface(), "just checking type");
1840 // search for field in current interface
1841 if (intf1->find_local_field(name, sig, fd)) {
1842 assert(fd->is_static(), "interface field must be static");
1843 return intf1;
1844 }
1845 // search for field in direct superinterfaces
1846 Klass* intf2 = intf1->find_interface_field(name, sig, fd);
1847 if (intf2 != nullptr) return intf2;
1848 }
1849 // otherwise field lookup fails
1850 return nullptr;
1851 }
1852
1853
1854 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1855 // search order according to newest JVM spec (5.4.3.2, p.167).
1856 // 1) search for field in current klass
1857 if (find_local_field(name, sig, fd)) {
1858 return const_cast<InstanceKlass*>(this);
1859 }
1860 // 2) search for field recursively in direct superinterfaces
1861 { Klass* intf = find_interface_field(name, sig, fd);
1862 if (intf != nullptr) return intf;
1863 }
1864 // 3) apply field lookup recursively if superclass exists
1865 { InstanceKlass* supr = super();
1866 if (supr != nullptr) return supr->find_field(name, sig, fd);
1867 }
1868 // 4) otherwise field lookup fails
1869 return nullptr;
1870 }
1871
1872
1873 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1874 // search order according to newest JVM spec (5.4.3.2, p.167).
1875 // 1) search for field in current klass
1876 if (find_local_field(name, sig, fd)) {
1877 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1878 }
1879 // 2) search for field recursively in direct superinterfaces
1880 if (is_static) {
1881 Klass* intf = find_interface_field(name, sig, fd);
1882 if (intf != nullptr) return intf;
1883 }
1884 // 3) apply field lookup recursively if superclass exists
1885 { InstanceKlass* supr = super();
1886 if (supr != nullptr) return supr->find_field(name, sig, is_static, fd);
1887 }
1888 // 4) otherwise field lookup fails
1889 return nullptr;
1890 }
1891
1892
1893 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1894 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1895 if (fs.offset() == offset) {
1896 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
1897 if (fd->is_static() == is_static) return true;
1898 }
1899 }
1900 return false;
1901 }
1902
1903
1904 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1905 const InstanceKlass* klass = this;
1906 while (klass != nullptr) {
1907 if (klass->find_local_field_from_offset(offset, is_static, fd)) {
1908 return true;
1909 }
1910 klass = klass->super();
1911 }
1912 return false;
1913 }
1914
1915
1916 void InstanceKlass::methods_do(void f(Method* method)) {
1917 // Methods aren't stable until the class is loaded. This can be read outside
1918 // a lock through the ClassLoaderData for profiling.
1919 // Redefined scratch classes are on the list and need to be cleaned.
1920 if (!is_loaded() && !is_scratch_class()) {
1921 return;
1922 }
1923
1924 int len = methods()->length();
1925 for (int index = 0; index < len; index++) {
1926 Method* m = methods()->at(index);
1927 assert(m->is_method(), "must be method");
1928 f(m);
1929 }
1930 }
1931
1932
1933 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1934 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1935 if (fs.access_flags().is_static()) {
1936 fieldDescriptor& fd = fs.field_descriptor();
1937 cl->do_field(&fd);
1938 }
1939 }
1940 }
1941
1942
1943 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
1944 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1945 if (fs.access_flags().is_static()) {
1946 fieldDescriptor& fd = fs.field_descriptor();
1947 f(&fd, mirror, CHECK);
1948 }
1949 }
1950 }
1951
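// Visit all nonstatic fields, superclass fields first, in field-stream order within
// each class (not sorted by offset; see print_nonstatic_fields() for that).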
1952 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1953 InstanceKlass* super = this->super();
1954 if (super != nullptr) {
1955 super->do_nonstatic_fields(cl);
1956 }
1957 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1958 fieldDescriptor& fd = fs.field_descriptor();
1959 if (!fd.is_static()) {
1960 cl->do_field(&fd);
1961 }
1962 }
1963 }
1964
1965 static int compare_fields_by_offset(FieldInfo* a, FieldInfo* b) {
1966 return a->offset() - b->offset();
1967 }
1968
1969 void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) {
1970 InstanceKlass* super = this->super();
1971 if (super != nullptr) {
1972 super->print_nonstatic_fields(cl);
1973 }
1974 ResourceMark rm;
1975 // In DebugInfo nonstatic fields are sorted by offset.
1976 GrowableArray<FieldInfo> fields_sorted;
1977 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
1978 if (!fs.access_flags().is_static()) {
1979 fields_sorted.push(fs.to_FieldInfo());
1980 }
1981 }
1982 int length = fields_sorted.length();
1983 if (length > 0) {
1984 fields_sorted.sort(compare_fields_by_offset);
1985 fieldDescriptor fd;
1986 for (int i = 0; i < length; i++) {
1987 fd.reinitialize(this, fields_sorted.at(i));
1988 assert(!fd.is_static() && fd.offset() == checked_cast<int>(fields_sorted.at(i).offset()), "only nonstatic fields");
1989 cl->do_field(&fd);
1990 }
1991 }
1992 }
1993
1994 #ifdef ASSERT
1995 static int linear_search(const Array<Method*>* methods,
1996 const Symbol* name,
1997 const Symbol* signature) {
1998 const int len = methods->length();
1999 for (int index = 0; index < len; index++) {
2000 const Method* const m = methods->at(index);
2001 assert(m->is_method(), "must be method");
2002 if (m->signature() == signature && m->name() == name) {
2003 return index;
2004 }
2005 }
2006 return -1;
2007 }
2008 #endif
2009
2010 bool InstanceKlass::_disable_method_binary_search = false;
2011
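// Linear scan by name only; used instead of binary search when the methods array is
// no longer sorted by the address of the method names (see quick_search() below).
// Returns the index of the first method with a matching name, or -1.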
2012 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
2013 int len = methods->length();
2014 int l = 0;
2015 int h = len - 1;
2016 while (l <= h) {
2017 Method* m = methods->at(l);
2018 if (m->name() == name) {
2019 return l;
2020 }
2021 l++;
2022 }
2023 return -1;
2024 }
2025
2026 inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
2027 if (_disable_method_binary_search) {
2028 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
2029 // At the final stage of dynamic dumping, the methods array may not be sorted
2030 // by ascending addresses of their names, so we can't use binary search anymore.
2031 // However, methods with the same name are still laid out consecutively inside the
2032 // methods array, so let's look for the first one that matches.
2033 return linear_search(methods, name);
2034 }
2035
2036 int len = methods->length();
2037 int l = 0;
2038 int h = len - 1;
2039
2040 // methods are sorted by ascending addresses of their names, so do binary search
2041 while (l <= h) {
2042 int mid = (l + h) >> 1;
2043 Method* m = methods->at(mid);
2044 assert(m->is_method(), "must be method");
2045 int res = m->name()->fast_compare(name);
2046 if (res == 0) {
2047 return mid;
2048 } else if (res < 0) {
2049 l = mid + 1;
2050 } else {
2051 h = mid - 1;
2052 }
2053 }
2054 return -1;
2055 }
2056
2057 // find_method looks up the name/signature in the local methods array
2058 Method* InstanceKlass::find_method(const Symbol* name,
2059 const Symbol* signature) const {
2060 return find_method_impl(name, signature,
2061 OverpassLookupMode::find,
2062 StaticLookupMode::find,
2063 PrivateLookupMode::find);
2064 }
2065
2066 Method* InstanceKlass::find_method_impl(const Symbol* name,
2067 const Symbol* signature,
2068 OverpassLookupMode overpass_mode,
2069 StaticLookupMode static_mode,
2070 PrivateLookupMode private_mode) const {
2071 return InstanceKlass::find_method_impl(methods(),
2072 name,
2073 signature,
2074 overpass_mode,
2075 static_mode,
2076 private_mode);
2077 }
2078
2079 // find_instance_method looks up the name/signature in the local methods array
2080 // and skips over static methods
2081 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
2082 const Symbol* name,
2083 const Symbol* signature,
2084 PrivateLookupMode private_mode) {
2085 Method* const meth = InstanceKlass::find_method_impl(methods,
2086 name,
2087 signature,
2088 OverpassLookupMode::find,
2089 StaticLookupMode::skip,
2090 private_mode);
2091 assert(((meth == nullptr) || !meth->is_static()),
2092 "find_instance_method should have skipped statics");
2093 return meth;
2094 }
2095
2096 // find_instance_method looks up the name/signature in the local methods array
2097 // and skips over static methods
2098 Method* InstanceKlass::find_instance_method(const Symbol* name,
2099 const Symbol* signature,
2100 PrivateLookupMode private_mode) const {
2101 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
2102 }
2103
2104 // Find looks up the name/signature in the local methods array
2105 // and filters on the overpass, static and private flags.
2106 // This returns the first one found.
2107 // Note that the local methods array can have up to one overpass, one static
2108 // and one instance (private or not) method with the same name/signature.
2109 Method* InstanceKlass::find_local_method(const Symbol* name,
2110 const Symbol* signature,
2111 OverpassLookupMode overpass_mode,
2112 StaticLookupMode static_mode,
2113 PrivateLookupMode private_mode) const {
2114 return InstanceKlass::find_method_impl(methods(),
2115 name,
2116 signature,
2117 overpass_mode,
2118 static_mode,
2119 private_mode);
2120 }
2121
2122 // Find looks up the name/signature in the local methods array
2123 // and filters on the overpass, static and private flags.
2124 // This returns the first one found.
2125 // Note that the local methods array can have up to one overpass, one static
2126 // and one instance (private or not) method with the same name/signature.
2127 Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
2128 const Symbol* name,
2129 const Symbol* signature,
2130 OverpassLookupMode overpass_mode,
2131 StaticLookupMode static_mode,
2132 PrivateLookupMode private_mode) {
2133 return InstanceKlass::find_method_impl(methods,
2134 name,
2135 signature,
2136 overpass_mode,
2137 static_mode,
2138 private_mode);
2139 }
2140
2141 Method* InstanceKlass::find_method(const Array<Method*>* methods,
2142 const Symbol* name,
2143 const Symbol* signature) {
2144 return InstanceKlass::find_method_impl(methods,
2145 name,
2146 signature,
2147 OverpassLookupMode::find,
2148 StaticLookupMode::find,
2149 PrivateLookupMode::find);
2150 }
2151
2152 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
2153 const Symbol* name,
2154 const Symbol* signature,
2155 OverpassLookupMode overpass_mode,
2156 StaticLookupMode static_mode,
2157 PrivateLookupMode private_mode) {
2158 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
2159 return hit >= 0 ? methods->at(hit): nullptr;
2160 }
2161
2162 // true if method matches signature and conforms to skipping_X conditions.
2163 static bool method_matches(const Method* m,
2164 const Symbol* signature,
2165 bool skipping_overpass,
2166 bool skipping_static,
2167 bool skipping_private) {
2168 return ((m->signature() == signature) &&
2169 (!skipping_overpass || !m->is_overpass()) &&
2170 (!skipping_static || !m->is_static()) &&
2171 (!skipping_private || !m->is_private()));
2172 }
2173
2174 // Used directly for default_methods to find the index into the
2175 // default_vtable_indices, and indirectly by find_method.
2176 // find_method_index looks in the local methods array to return the index
2177 // of the matching name/signature. If overpass methods are being ignored,
2178 // the search continues to find a potential non-overpass match. This capability
2179 // is important during method resolution to prefer a static method, for example,
2180 // over an overpass method.
2181 // Any _methods array may contain the same name/signature for a static method,
2182 // an overpass method and a local instance method.
2183 // To correctly catch a given method, the search criteria may need
2184 // to explicitly skip the other two. For local instance methods, it
2185 // is often necessary to skip private methods as well.
2186 int InstanceKlass::find_method_index(const Array<Method*>* methods,
2187 const Symbol* name,
2188 const Symbol* signature,
2189 OverpassLookupMode overpass_mode,
2190 StaticLookupMode static_mode,
2191 PrivateLookupMode private_mode) {
2192 const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
2193 const bool skipping_static = (static_mode == StaticLookupMode::skip);
2194 const bool skipping_private = (private_mode == PrivateLookupMode::skip);
2195 const int hit = quick_search(methods, name);
2196 if (hit != -1) {
2197 const Method* const m = methods->at(hit);
2198
2199 // Do linear search to find matching signature. First, quick check
2200 // for common case, ignoring overpasses if requested.
2201 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2202 return hit;
2203 }
2204
2205 // search downwards through overloaded methods
2206 int i;
2207 for (i = hit - 1; i >= 0; --i) {
2208 const Method* const m = methods->at(i);
2209 assert(m->is_method(), "must be method");
2210 if (m->name() != name) {
2211 break;
2212 }
2213 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2214 return i;
2215 }
2216 }
2217 // search upwards
2218 for (i = hit + 1; i < methods->length(); ++i) {
2219 const Method* const m = methods->at(i);
2220 assert(m->is_method(), "must be method");
2221 if (m->name() != name) {
2222 break;
2223 }
2224 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2225 return i;
2226 }
2227 }
2228 // not found
2229 #ifdef ASSERT
2230 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
2231 linear_search(methods, name, signature);
2232 assert(-1 == index, "binary search should have found entry %d", index);
2233 #endif
2234 }
2235 return -1;
2236 }
2237
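// Return the index of the first method with the given name and set *end_ptr to one
// past the last such method, relying on methods with the same name being laid out
// consecutively in the name-sorted methods array. Returns -1 if no method matches.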
2238 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
2239 return find_method_by_name(methods(), name, end);
2240 }
2241
2242 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
2243 const Symbol* name,
2244 int* end_ptr) {
2245 assert(end_ptr != nullptr, "just checking");
2246 int start = quick_search(methods, name);
2247 int end = start + 1;
2248 if (start != -1) {
2249 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
2250 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
2251 *end_ptr = end;
2252 return start;
2253 }
2254 return -1;
2255 }
2256
2257 // uncached_lookup_method searches both the local class methods array and all
2258 // superclasses methods arrays, skipping any overpass methods in superclasses,
2259 // and possibly skipping private methods.
2260 Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
2261 const Symbol* signature,
2262 OverpassLookupMode overpass_mode,
2263 PrivateLookupMode private_mode) const {
2264 OverpassLookupMode overpass_local_mode = overpass_mode;
2265 const InstanceKlass* klass = this;
2266 while (klass != nullptr) {
2267 Method* const method = klass->find_method_impl(name,
2268 signature,
2269 overpass_local_mode,
2270 StaticLookupMode::find,
2271 private_mode);
2272 if (method != nullptr) {
2273 return method;
2274 }
2275 klass = klass->super();
2276 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
2277 }
2278 return nullptr;
2279 }
2280
2281 #ifdef ASSERT
2282 // search through class hierarchy and return true if this class or
2283 // one of the superclasses was redefined
2284 bool InstanceKlass::has_redefined_this_or_super() const {
2285 const InstanceKlass* klass = this;
2286 while (klass != nullptr) {
2287 if (klass->has_been_redefined()) {
2288 return true;
2289 }
2290 klass = klass->super();
2291 }
2292 return false;
2293 }
2294 #endif
2295
2296 // lookup a method in the default methods list then in all transitive interfaces
2297 // Do NOT return private or static methods
2298 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2299 Symbol* signature) const {
2300 Method* m = nullptr;
2301 if (default_methods() != nullptr) {
2302 m = find_method(default_methods(), name, signature);
2303 }
2304 // Look up interfaces
2305 if (m == nullptr) {
2306 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2307 }
2308 return m;
2309 }
2310
2311 // lookup a method in all the interfaces that this class implements
2312 // Do NOT return private or static methods (new in JDK 8), which are not externally visible.
2313 // They should only be found in the initial InterfaceMethodRef
2314 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2315 Symbol* signature,
2316 DefaultsLookupMode defaults_mode) const {
2317 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2318 int num_ifs = all_ifs->length();
2319 InstanceKlass *ik = nullptr;
2320 for (int i = 0; i < num_ifs; i++) {
2321 ik = all_ifs->at(i);
2322 Method* m = ik->lookup_method(name, signature);
2323 if (m != nullptr && m->is_public() && !m->is_static() &&
2324 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2325 return m;
2326 }
2327 }
2328 return nullptr;
2329 }
2330
2331 PrintClassClosure::PrintClassClosure(outputStream* st, bool verbose)
2332 :_st(st), _verbose(verbose) {
2333 ResourceMark rm;
2334 _st->print("%-18s ", "KlassAddr");
2335 _st->print("%-4s ", "Size");
2336 _st->print("%-20s ", "State");
2337 _st->print("%-7s ", "Flags");
2338 _st->print("%-5s ", "ClassName");
2339 _st->cr();
2340 }
2341
2342 void PrintClassClosure::do_klass(Klass* k) {
2343 ResourceMark rm;
2344 // klass pointer
2345 _st->print(PTR_FORMAT " ", p2i(k));
2346 // klass size
2347 _st->print("%4d ", k->size());
2348 // initialization state
2349 if (k->is_instance_klass()) {
2350 _st->print("%-20s ",InstanceKlass::cast(k)->init_state_name());
2351 } else {
2352 _st->print("%-20s ","");
2353 }
2354 // misc flags (changes should be kept in sync with the ClassesDCmd::ClassesDCmd help doc)
2355 char buf[10];
2356 int i = 0;
2357 if (k->has_finalizer()) buf[i++] = 'F';
2358 if (k->is_instance_klass()) {
2359 InstanceKlass* ik = InstanceKlass::cast(k);
2360 if (ik->has_final_method()) buf[i++] = 'f';
2361 if (ik->is_rewritten()) buf[i++] = 'W';
2362 if (ik->is_contended()) buf[i++] = 'C';
2363 if (ik->has_been_redefined()) buf[i++] = 'R';
2364 if (ik->in_aot_cache()) buf[i++] = 'S';
2365 }
2366 buf[i++] = '\0';
2367 _st->print("%-7s ", buf);
2368 // klass name
2369 _st->print("%-5s ", k->external_name());
2370 // end
2371 _st->cr();
2372 if (_verbose) {
2373 k->print_on(_st);
2374 }
2375 }
2376
2377 /* jni_id_for for jfieldIds only */
2378 JNIid* InstanceKlass::jni_id_for(int offset) {
2379 MutexLocker ml(JfieldIdCreation_lock);
2380 JNIid* probe = jni_ids() == nullptr ? nullptr : jni_ids()->find(offset);
2381 if (probe == nullptr) {
2382 // Allocate new static field identifier
2383 probe = new JNIid(this, offset, jni_ids());
2384 set_jni_ids(probe);
2385 }
2386 return probe;
2387 }
2388
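// The EnclosingMethod attribute, when present, is stored as two extra u2 values
// (class index and method index) appended to the _inner_classes array; a length
// that is not a multiple of inner_class_next_offset indicates they are present.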
2389 u2 InstanceKlass::enclosing_method_data(int offset) const {
2390 const Array<jushort>* const inner_class_list = inner_classes();
2391 if (inner_class_list == nullptr) {
2392 return 0;
2393 }
2394 const int length = inner_class_list->length();
2395 if (length % inner_class_next_offset == 0) {
2396 return 0;
2397 }
2398 const int index = length - enclosing_method_attribute_size;
2399 assert(offset < enclosing_method_attribute_size, "invalid offset");
2400 return inner_class_list->at(index + offset);
2401 }
2402
2403 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2404 u2 method_index) {
2405 Array<jushort>* inner_class_list = inner_classes();
2406 assert (inner_class_list != nullptr, "_inner_classes list is not set up");
2407 int length = inner_class_list->length();
2408 if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2409 int index = length - enclosing_method_attribute_size;
2410 inner_class_list->at_put(
2411 index + enclosing_method_class_index_offset, class_index);
2412 inner_class_list->at_put(
2413 index + enclosing_method_method_index_offset, method_index);
2414 }
2415 }
2416
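// Create a jmethodID for the given method and publish it in the cache slot for its
// idnum. Element 0 of the cache holds the cache size, so idnum N is stored at N + 1.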
2417 jmethodID InstanceKlass::update_jmethod_id(jmethodID* jmeths, Method* method, int idnum) {
2418 if (method->is_old() && !method->is_obsolete()) {
2419 // If the method passed in is old (but not obsolete), use the current version.
2420 method = method_with_idnum((int)idnum);
2421 assert(method != nullptr, "old but not obsolete, so should exist");
2422 }
2423 jmethodID new_id = Method::make_jmethod_id(class_loader_data(), method);
2424 AtomicAccess::release_store(&jmeths[idnum + 1], new_id);
2425 return new_id;
2426 }
2427
2428 // Allocate the jmethodID cache.
2429 static jmethodID* create_jmethod_id_cache(size_t size) {
2430 jmethodID* jmeths = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass);
2431 memset(jmeths, 0, (size + 1) * sizeof(jmethodID));
2432 // cache size is stored in element[0], other elements offset by one
2433 jmeths[0] = (jmethodID)size;
2434 return jmeths;
2435 }
2436
2437 // When reading outside a lock, use this.
2438 jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
2439 return AtomicAccess::load_acquire(&_methods_jmethod_ids);
2440 }
2441
2442 void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
2443 AtomicAccess::release_store(&_methods_jmethod_ids, jmeths);
2444 }
2445
2446 // Lookup or create a jmethodID.
2447 jmethodID InstanceKlass::get_jmethod_id(Method* method) {
2448 int idnum = method->method_idnum();
2449 jmethodID* jmeths = methods_jmethod_ids_acquire();
2450
2451 // We use a double-check locking idiom here because this cache is
2452 // performance sensitive. In the normal system, this cache only
2453 // transitions from null to non-null which is safe because we use
2454 // release_set_methods_jmethod_ids() to advertise the new cache.
2455 // A partially constructed cache should never be seen by a racing
2456 // thread. We also use release_store() to save a new jmethodID
2457 // in the cache so a partially constructed jmethodID should never be
2458 // seen either. Cache reads of existing jmethodIDs proceed without a
2459 // lock, but cache writes of a new jmethodID require uniqueness and
2460 // creation of the cache itself must not leak, so a lock is
2461 // acquired in those two cases.
2462 //
2463 // If the RedefineClasses() API has been used, then this cache grows
2464 // in the redefinition safepoint.
2465
2466 if (jmeths == nullptr) {
2467 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2468 jmeths = _methods_jmethod_ids;
2469 // Still null?
2470 if (jmeths == nullptr) {
2471 size_t size = idnum_allocated_count();
2472 assert(size > (size_t)idnum, "should already have space");
2473 jmeths = create_jmethod_id_cache(size);
2474 jmethodID new_id = update_jmethod_id(jmeths, method, idnum);
2475
2476 // publish jmeths
2477 release_set_methods_jmethod_ids(jmeths);
2478 return new_id;
2479 }
2480 }
2481
2482 jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
2483 if (id == nullptr) {
2484 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2485 id = jmeths[idnum + 1];
2486 // Still null?
2487 if (id == nullptr) {
2488 return update_jmethod_id(jmeths, method, idnum);
2489 }
2490 }
2491 return id;
2492 }
2493
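// Grow the jmethodID cache when RedefineClasses has allocated additional method
// idnums; only called at a safepoint (see the assert below).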
2494 void InstanceKlass::update_methods_jmethod_cache() {
2495 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2496 jmethodID* cache = _methods_jmethod_ids;
2497 if (cache != nullptr) {
2498 size_t size = idnum_allocated_count();
2499 size_t old_size = (size_t)cache[0];
2500 if (old_size < size + 1) {
2501 // Allocate a larger one and copy entries to the new one.
2502 // They've already been updated to point to new methods where applicable (i.e., not obsolete).
2503 jmethodID* new_cache = create_jmethod_id_cache(size);
2504
2505 for (int i = 1; i <= (int)old_size; i++) {
2506 new_cache[i] = cache[i];
2507 }
2508 _methods_jmethod_ids = new_cache;
2509 FREE_C_HEAP_ARRAY(jmethodID, cache);
2510 }
2511 }
2512 }
2513
2514 // Make a jmethodID for all methods in this class. This makes getting all method
2515 // ids much, much faster for classes with more than 8
2516 // methods, and has a *substantial* effect on the performance of JVMTI
2517 // code that loads all jmethodIDs for all classes.
2518 void InstanceKlass::make_methods_jmethod_ids() {
2519 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2520 jmethodID* jmeths = _methods_jmethod_ids;
2521 if (jmeths == nullptr) {
2522 jmeths = create_jmethod_id_cache(idnum_allocated_count());
2523 release_set_methods_jmethod_ids(jmeths);
2524 }
2525
2526 int length = methods()->length();
2527 for (int index = 0; index < length; index++) {
2528 Method* m = methods()->at(index);
2529 int idnum = m->method_idnum();
2530 assert(!m->is_old(), "should not have old methods or I'm confused");
2531 jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
2532 if (!m->is_overpass() && // skip overpasses
2533 id == nullptr) {
2534 id = Method::make_jmethod_id(class_loader_data(), m);
2535 AtomicAccess::release_store(&jmeths[idnum + 1], id);
2536 }
2537 }
2538 }
2539
2540 // Lookup a jmethodID; return null if not found. Does no blocking, no allocations, and uses no handles.
2541 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2542 int idnum = method->method_idnum();
2543 jmethodID* jmeths = methods_jmethod_ids_acquire();
2544 return (jmeths != nullptr) ? jmeths[idnum + 1] : nullptr;
2545 }
2546
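// Wrap the raw _dep_context fields in a DependencyContext accessor; the dependency
// operations below all go through this helper.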
2547 inline DependencyContext InstanceKlass::dependencies() {
2548 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2549 return dep_context;
2550 }
2551
2552 void InstanceKlass::mark_dependent_nmethods(DeoptimizationScope* deopt_scope, KlassDepChange& changes) {
2553 dependencies().mark_dependent_nmethods(deopt_scope, changes);
2554 }
2555
2556 void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2557 assert_lock_strong(CodeCache_lock);
2558 dependencies().add_dependent_nmethod(nm);
2559 }
2560
2561 void InstanceKlass::clean_dependency_context() {
2562 dependencies().clean_unloading_dependents();
2563 }
2564
2565 #ifndef PRODUCT
2566 void InstanceKlass::print_dependent_nmethods(bool verbose) {
2567 dependencies().print_dependent_nmethods(verbose);
2568 }
2569
2570 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2571 return dependencies().is_dependent_nmethod(nm);
2572 }
2573 #endif //PRODUCT
2574
2575 void InstanceKlass::clean_weak_instanceklass_links() {
2576 clean_implementors_list();
2577 clean_method_data();
2578 }
2579
2580 void InstanceKlass::clean_implementors_list() {
2581 assert(is_loader_alive(), "this klass should be live");
2582 if (is_interface()) {
2583 assert (ClassUnloading, "only called for ClassUnloading");
2584 for (;;) {
2585 // Use load_acquire due to competing with inserts
2586 InstanceKlass* volatile* iklass = adr_implementor();
2587 assert(iklass != nullptr, "Klass must not be null");
2588 InstanceKlass* impl = AtomicAccess::load_acquire(iklass);
2589 if (impl != nullptr && !impl->is_loader_alive()) {
2590 // The recorded implementor has been unloaded; try to null out the field.
2591 if (AtomicAccess::cmpxchg(iklass, impl, (InstanceKlass*)nullptr) == impl) {
2592 // Successfully unlinked the implementor.
2593 if (log_is_enabled(Trace, class, unload)) {
2594 ResourceMark rm;
2595 log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2596 }
2597 return;
2598 }
2599 } else {
2600 return;
2601 }
2602 }
2603 }
2604 }
2605
2606 void InstanceKlass::clean_method_data() {
2607 for (int m = 0; m < methods()->length(); m++) {
2608 MethodData* mdo = methods()->at(m)->method_data();
2609 if (mdo != nullptr) {
2610 mdo->clean_method_data(/*always_clean*/false);
2611 }
2612 }
2613 }
2614
2615 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2616 Klass::metaspace_pointers_do(it);
2617
2618 if (log_is_enabled(Trace, aot)) {
2619 ResourceMark rm;
2620 log_trace(aot)("Iter(InstanceKlass): %p (%s)", this, external_name());
2621 }
2622
2623 it->push(&_annotations);
2624 it->push((Klass**)&_array_klasses);
2625 if (!is_rewritten()) {
2626 it->push(&_constants, MetaspaceClosure::_writable);
2627 } else {
2628 it->push(&_constants);
2629 }
2630 it->push(&_inner_classes);
2631 #if INCLUDE_JVMTI
2632 it->push(&_previous_versions);
2633 #endif
2634 #if INCLUDE_CDS
2635 // For "old" classes with methods containing the jsr bytecode, the _methods array will
2636 // be rewritten during runtime (see Rewriter::rewrite_jsrs()) but they cannot be safely
2637 // checked here with BytecodeStream. All methods that can't be verified are made writable.
2638 // The length check on the _methods is necessary because classes which don't have any
2639 // methods share the Universe::_the_empty_method_array which is in the RO region.
2640 if (_methods != nullptr && _methods->length() > 0 && !can_be_verified_at_dumptime()) {
2641 // To handle jsr bytecode, new Method* maybe stored into _methods
2642 it->push(&_methods, MetaspaceClosure::_writable);
2643 } else {
2644 #endif
2645 it->push(&_methods);
2646 #if INCLUDE_CDS
2647 }
2648 #endif
2649 it->push(&_default_methods);
2650 it->push(&_local_interfaces);
2651 it->push(&_transitive_interfaces);
2652 it->push(&_method_ordering);
2653 if (!is_rewritten()) {
2654 it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2655 } else {
2656 it->push(&_default_vtable_indices);
2657 }
2658
2659 it->push(&_fieldinfo_stream);
2660 it->push(&_fieldinfo_search_table);
2661 // _fields_status might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
2662 it->push(&_fields_status, MetaspaceClosure::_writable);
2663
2664 if (itable_length() > 0) {
2665 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2666 int method_table_offset_in_words = ioe->offset()/wordSize;
2667 int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);
2668
2669 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
2670 / itableOffsetEntry::size();
2671
2672 for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2673 if (ioe->interface_klass() != nullptr) {
2674 it->push(ioe->interface_klass_addr());
2675 itableMethodEntry* ime = ioe->first_method_entry(this);
2676 int n = klassItable::method_count_for_interface(ioe->interface_klass());
2677 for (int index = 0; index < n; index ++) {
2678 it->push(ime[index].method_addr());
2679 }
2680 }
2681 }
2682 }
2683
2684 it->push(&_nest_host);
2685 it->push(&_nest_members);
2686 it->push(&_permitted_subclasses);
2687 it->push(&_record_components);
2688 }
2689
2690 #if INCLUDE_CDS
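// Strip state that must not go into the AOT/CDS archive (JNI ids, jmethodIDs,
// dependency contexts, caches, etc.). restore_unshareable_info() below rebuilds
// what is needed when the archived class is loaded at runtime.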
2691 void InstanceKlass::remove_unshareable_info() {
2692
2693 if (is_linked()) {
2694 assert(can_be_verified_at_dumptime(), "must be");
2695 // Remember this so we can avoid walking the hierarchy at runtime.
2696 set_verified_at_dump_time();
2697 }
2698
2699 _misc_flags.set_has_init_deps_processed(false);
2700
2701 Klass::remove_unshareable_info();
2702
2703 if (SystemDictionaryShared::has_class_failed_verification(this)) {
2704 // Classes are attempted to link during dumping and may fail,
2705 // but these classes are still in the dictionary and class list in CLD.
2706 // If the class has failed verification, there is nothing else to remove.
2707 return;
2708 }
2709
2710 // Reset to the 'allocated' state to prevent any premature access to
2711 // a shared class at runtime while the class is still being loaded and
2712 // restored. A class' init_state is set to 'loaded' at runtime when it's
2713 // being added to the class hierarchy (see InstanceKlass::add_to_hierarchy()).
2714 _init_state = allocated;
2715
2716 { // Otherwise this needs to take out the Compile_lock.
2717 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2718 init_implementor();
2719 }
2720
2721 // Call remove_unshareable_info() on other objects that belong to this class, except
2722 // for constants()->remove_unshareable_info(), which is called in a separate pass in
2723 // ArchiveBuilder::make_klasses_shareable().
2724
2725 for (int i = 0; i < methods()->length(); i++) {
2726 Method* m = methods()->at(i);
2727 m->remove_unshareable_info();
2728 }
2729
2730 // do array classes also.
2731 if (array_klasses() != nullptr) {
2732 array_klasses()->remove_unshareable_info();
2733 }
2734
2735 // These are not allocated from metaspace. They are safe to set to null.
2736 _source_debug_extension = nullptr;
2737 _dep_context = nullptr;
2738 _osr_nmethods_head = nullptr;
2739 #if INCLUDE_JVMTI
2740 _breakpoints = nullptr;
2741 _previous_versions = nullptr;
2742 _cached_class_file = nullptr;
2743 _jvmti_cached_class_field_map = nullptr;
2744 #endif
2745
2746 _init_thread = nullptr;
2747 _methods_jmethod_ids = nullptr;
2748 _jni_ids = nullptr;
2749 _oop_map_cache = nullptr;
2750 if (CDSConfig::is_dumping_method_handles() && HeapShared::is_lambda_proxy_klass(this)) {
2751 // keep _nest_host
2752 } else {
2753 // clear _nest_host to ensure re-load at runtime
2754 _nest_host = nullptr;
2755 }
2756 init_shared_package_entry();
2757 _dep_context_last_cleaned = 0;
2758 DEBUG_ONLY(_shared_class_load_count = 0);
2759
2760 remove_unshareable_flags();
2761
2762 DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
2763 }
2764
2765 void InstanceKlass::remove_unshareable_flags() {
2766 // clear all the flags/stats that shouldn't be in the archived version
2767 assert(!is_scratch_class(), "must be");
2768 assert(!has_been_redefined(), "must be");
2769 #if INCLUDE_JVMTI
2770 set_is_being_redefined(false);
2771 #endif
2772 set_has_resolved_methods(false);
2773 }
2774
2775 void InstanceKlass::remove_java_mirror() {
2776 Klass::remove_java_mirror();
2777
2778 // do array classes also.
2779 if (array_klasses() != nullptr) {
2780 array_klasses()->remove_java_mirror();
2781 }
2782 }
2783
2784 void InstanceKlass::init_shared_package_entry() {
2785 assert(CDSConfig::is_dumping_archive(), "must be");
2786 #if !INCLUDE_CDS_JAVA_HEAP
2787 _package_entry = nullptr;
2788 #else
2789 if (CDSConfig::is_dumping_full_module_graph()) {
2790 if (defined_by_other_loaders()) {
2791 _package_entry = nullptr;
2792 } else {
2793 _package_entry = PackageEntry::get_archived_entry(_package_entry);
2794 }
2795 } else if (CDSConfig::is_dumping_dynamic_archive() &&
2796 CDSConfig::is_using_full_module_graph() &&
2797 AOTMetaspace::in_aot_cache(_package_entry)) {
2798 // _package_entry is an archived package in the base archive. Leave it as is.
2799 } else {
2800 _package_entry = nullptr;
2801 }
2802 ArchivePtrMarker::mark_pointer((address**)&_package_entry);
2803 #endif
2804 }
2805
2806 void InstanceKlass::compute_has_loops_flag_for_methods() {
2807 Array<Method*>* methods = this->methods();
2808 for (int index = 0; index < methods->length(); ++index) {
2809 Method* m = methods->at(index);
2810 if (!m->is_overpass()) { // work around JDK-8305771
2811 m->compute_has_loops_flag();
2812 }
2813 }
2814 }
2815
2816 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
2817 PackageEntry* pkg_entry, TRAPS) {
2818 // InstanceKlass::add_to_hierarchy() sets the init_state to loaded
2819 // before the InstanceKlass is added to the SystemDictionary. Make
2820 // sure the current state is <loaded.
2821 assert(!is_loaded(), "invalid init state");
2822 assert(!shared_loading_failed(), "Must not try to load failed class again");
2823 set_package(loader_data, pkg_entry, CHECK);
2824 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
2825
2826 Array<Method*>* methods = this->methods();
2827 int num_methods = methods->length();
2828 for (int index = 0; index < num_methods; ++index) {
2829 methods->at(index)->restore_unshareable_info(CHECK);
2830 }
2831 #if INCLUDE_JVMTI
2832 if (JvmtiExport::has_redefined_a_class()) {
2833 // Reinitialize vtable because RedefineClasses may have changed some
2834 // entries in this vtable for super classes so the CDS vtable might
2835 // point to old or obsolete entries. RedefineClasses doesn't fix up
2836 // vtables in the shared system dictionary, only the main one.
2837 // It also redefines the itable too so fix that too.
2838 // First fix any default methods that point to a super class that may
2839 // have been redefined.
2840 bool trace_name_printed = false;
2841 adjust_default_methods(&trace_name_printed);
2842 if (verified_at_dump_time()) {
2843 // Initialize vtable and itable for classes which can be verified at dump time.
2844 // Unlinked classes such as old classes with major version < 50 cannot be verified
2845 // at dump time.
2846 vtable().initialize_vtable();
2847 itable().initialize_itable();
2848 }
2849 }
2850 #endif // INCLUDE_JVMTI
2851
2852 // restore constant pool resolved references
2853 constants()->restore_unshareable_info(CHECK);
2854
2855 if (array_klasses() != nullptr) {
2856 // To get a consistent list of classes we need MultiArray_lock to ensure
2857 // array classes aren't observed while they are being restored.
2858 RecursiveLocker rl(MultiArray_lock, THREAD);
2859 assert(this == array_klasses()->bottom_klass(), "sanity");
2860 // Array classes have null protection domain.
2861 // --> see ArrayKlass::complete_create_array_klass()
2862 array_klasses()->restore_unshareable_info(class_loader_data(), Handle(), CHECK);
2863 }
2864
2865 // Initialize @ValueBased class annotation if not already set in the archived klass.
2866 if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation() && !is_value_based()) {
2867 set_is_value_based();
2868 }
2869
2870 DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
2871 }
2872
2873 bool InstanceKlass::can_be_verified_at_dumptime() const {
2874 if (CDSConfig::is_dumping_dynamic_archive() && AOTMetaspace::in_aot_cache(this)) {
2875 // This is a class that was dumped into the base archive, so we know
2876 // it was verified at dump time.
2877 return true;
2878 }
2879
2880 if (CDSConfig::is_preserving_verification_constraints()) {
2881 return true;
2882 }
2883
2884 if (CDSConfig::is_old_class_for_verifier(this)) {
2885 // The old verifier does not save verification constraints, so at run time
2886 // SystemDictionaryShared::check_verification_constraints() will not work for this class.
2887 return false;
2888 }
2889 if (super() != nullptr && !super()->can_be_verified_at_dumptime()) {
2890 return false;
2891 }
2892 Array<InstanceKlass*>* interfaces = local_interfaces();
2893 int len = interfaces->length();
2894 for (int i = 0; i < len; i++) {
2895 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
2896 return false;
2897 }
2898 }
2899 return true;
2900 }
2901
2902 #endif // INCLUDE_CDS
2903
2904 #if INCLUDE_JVMTI
2905 static void clear_all_breakpoints(Method* m) {
2906 m->clear_all_breakpoints();
2907 }
2908 #endif
2909
2910 void InstanceKlass::unload_class(InstanceKlass* ik) {
2911
2912 if (ik->is_scratch_class()) {
2913 assert(ik->dependencies().is_empty(), "dependencies should be empty for scratch classes");
2914 return;
2915 }
2916 assert(ik->is_loaded(), "class should be loaded " PTR_FORMAT, p2i(ik));
2917
2918 // Release dependencies.
2919 ik->dependencies().remove_all_dependents();
2920
2921 // notify the debugger
2922 if (JvmtiExport::should_post_class_unload()) {
2923 JvmtiExport::post_class_unload(ik);
2924 }
2925
2926 // notify ClassLoadingService of class unload
2927 ClassLoadingService::notify_class_unloaded(ik);
2928
2929 SystemDictionaryShared::handle_class_unloading(ik);
2930
2931 if (log_is_enabled(Info, class, unload)) {
2932 ResourceMark rm;
2933 log_info(class, unload)("unloading class %s " PTR_FORMAT, ik->external_name(), p2i(ik));
2934 }
2935
2936 Events::log_class_unloading(Thread::current(), ik);
2937
2938 #if INCLUDE_JFR
2939 assert(ik != nullptr, "invariant");
2940 EventClassUnload event;
2941 event.set_unloadedClass(ik);
2942 event.set_definingClassLoader(ik->class_loader_data());
2943 event.commit();
2944 #endif
2945 }
2946
2947 static void method_release_C_heap_structures(Method* m) {
2948 m->release_C_heap_structures();
2949 }
2950
2951 // Called also by InstanceKlass::deallocate_contents, with false for release_sub_metadata.
2952 void InstanceKlass::release_C_heap_structures(bool release_sub_metadata) {
2953 // Clean up C heap
2954 Klass::release_C_heap_structures();
2955
2956 // Deallocate and call destructors for MDO mutexes
2957 if (release_sub_metadata) {
2958 methods_do(method_release_C_heap_structures);
2959 }
2960
2961 // Deallocate oop map cache
2962 if (_oop_map_cache != nullptr) {
2963 delete _oop_map_cache;
2964 _oop_map_cache = nullptr;
2965 }
2966
2967 // Deallocate JNI identifiers for jfieldIDs
2968 JNIid::deallocate(jni_ids());
2969 set_jni_ids(nullptr);
2970
2971 jmethodID* jmeths = _methods_jmethod_ids;
2972 if (jmeths != nullptr) {
2973 release_set_methods_jmethod_ids(nullptr);
2974 FreeHeap(jmeths);
2975 }
2976
2977 assert(_dep_context == nullptr,
2978 "dependencies should already be cleaned");
2979
2980 #if INCLUDE_JVMTI
2981 // Deallocate breakpoint records
2982 if (breakpoints() != nullptr) {
2983 methods_do(clear_all_breakpoints);
2984 assert(breakpoints() == nullptr, "should have cleared breakpoints");
2985 }
2986
2987 // deallocate the cached class file
2988 if (_cached_class_file != nullptr) {
2989 os::free(_cached_class_file);
2990 _cached_class_file = nullptr;
2991 }
2992 #endif
2993
2994 FREE_C_HEAP_ARRAY(char, _source_debug_extension);
2995
2996 if (release_sub_metadata) {
2997 constants()->release_C_heap_structures();
2998 }
2999 }
3000
3001 // The constant pool is on stack if any of the methods are executing or
3002 // referenced by handles.
3003 bool InstanceKlass::on_stack() const {
3004 return _constants->on_stack();
3005 }
3006
3007 Symbol* InstanceKlass::source_file_name() const { return _constants->source_file_name(); }
3008 u2 InstanceKlass::source_file_name_index() const { return _constants->source_file_name_index(); }
3009 void InstanceKlass::set_source_file_name_index(u2 sourcefile_index) { _constants->set_source_file_name_index(sourcefile_index); }
3010
3011 // minor and major version numbers of class file
3012 u2 InstanceKlass::minor_version() const { return _constants->minor_version(); }
3013 void InstanceKlass::set_minor_version(u2 minor_version) { _constants->set_minor_version(minor_version); }
3014 u2 InstanceKlass::major_version() const { return _constants->major_version(); }
3015 void InstanceKlass::set_major_version(u2 major_version) { _constants->set_major_version(major_version); }
3016
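// Walk this class and its previous versions (linked through previous_versions())
// and return the version whose constant pool version number matches, or null.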
3017 const InstanceKlass* InstanceKlass::get_klass_version(int version) const {
3018 for (const InstanceKlass* ik = this; ik != nullptr; ik = ik->previous_versions()) {
3019 if (ik->constants()->version() == version) {
3020 return ik;
3021 }
3022 }
3023 return nullptr;
3024 }
3025
3026 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
3027 if (array == nullptr) {
3028 _source_debug_extension = nullptr;
3029 } else {
3030 // Adding one to the attribute length in order to store a null terminator
3031 // character could cause an overflow because the attribute length is
3032 // already encoded as a u4 in the class file, but in practice it's
3033 // unlikely to happen.
3034 assert((length+1) > length, "Overflow checking");
3035 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
3036 for (int i = 0; i < length; i++) {
3037 sde[i] = array[i];
3038 }
3039 sde[length] = '\0';
3040 _source_debug_extension = sde;
3041 }
3042 }
3043
3044 Symbol* InstanceKlass::generic_signature() const { return _constants->generic_signature(); }
3045 u2 InstanceKlass::generic_signature_index() const { return _constants->generic_signature_index(); }
3046 void InstanceKlass::set_generic_signature_index(u2 sig_index) { _constants->set_generic_signature_index(sig_index); }
3047
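// Return the JVM signature form of this class's name, e.g. "Ljava/lang/String;",
// in a resource-allocated buffer. For hidden classes the last '+' in the name
// is replaced with a '.'.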
3048 const char* InstanceKlass::signature_name() const {
3049
3050 // Get the internal name as a C string
3051 const char* src = (const char*) (name()->as_C_string());
3052 const int src_length = (int)strlen(src);
3053
3054 char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
3055
3056 // Add L as type indicator
3057 int dest_index = 0;
3058 dest[dest_index++] = JVM_SIGNATURE_CLASS;
3059
3060 // Add the actual class name
3061 for (int src_index = 0; src_index < src_length; ) {
3062 dest[dest_index++] = src[src_index++];
3063 }
3064
3065 if (is_hidden()) { // Replace the last '+' with a '.'.
3066 for (int index = (int)src_length; index > 0; index--) {
3067 if (dest[index] == '+') {
3068 dest[index] = JVM_SIGNATURE_DOT;
3069 break;
3070 }
3071 }
3072 }
3073
3074 // Add the semicolon and the null
3075 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
3076 dest[dest_index] = '\0';
3077 return dest;
3078 }
3079
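// Return the ModuleEntry this class belongs to. Non-strong hidden classes in an
// unnamed package use their class loader's unnamed module, since their
// class-mirror-holder CLD has no unnamed module of its own.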
3080 ModuleEntry* InstanceKlass::module() const {
3081 if (is_hidden() &&
3082 in_unnamed_package() &&
3083 class_loader_data()->has_class_mirror_holder()) {
3084 // For a non-strong hidden class defined to an unnamed package,
3085 // its (class held) CLD will not have an unnamed module created for it.
3086 // Two choices to find the correct ModuleEntry:
3087 // 1. If hidden class is within a nest, use nest host's module
3088 // 2. Find the unnamed module off from the class loader
3089 // For now option #2 is used since a nest host is not set until
3090 // after the instance class is created in jvm_lookup_define_class().
3091 if (class_loader_data()->is_boot_class_loader_data()) {
3092 return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
3093 } else {
3094 oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
3095 assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
3096 return java_lang_Module::module_entry(module);
3097 }
3098 }
3099
3100 // Class is in a named package
3101 if (!in_unnamed_package()) {
3102 return _package_entry->module();
3103 }
3104
3105 // Class is in an unnamed package, return its loader's unnamed module
3106 return class_loader_data()->unnamed_module();
3107 }
3108
3109 bool InstanceKlass::in_javabase_module() const {
3110 return module()->name() == vmSymbols::java_base();
3111 }
3112
3113 void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {
3114
3115 // Ensure classes in java/ packages are only loaded by the boot or platform built-in loaders;
3116 // not needed for a shared class since CDS does not archive prohibited classes.
3117 if (!in_aot_cache()) {
3118 check_prohibited_package(name(), loader_data, CHECK);
3119 }
3120
3121 if (in_aot_cache() && _package_entry != nullptr) {
3122 if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) {
3123 // we can use the saved package
3124 assert(AOTMetaspace::in_aot_cache(_package_entry), "must be");
3125 return;
3126 } else {
3127 _package_entry = nullptr;
3128 }
3129 }
3130
3131 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
3132 // it returns, so we need to decrement it when the current function exits.
3133 TempNewSymbol from_class_name =
3134 (pkg_entry != nullptr) ? nullptr : ClassLoader::package_from_class_name(name());
3135
3136 Symbol* pkg_name;
3137 if (pkg_entry != nullptr) {
3138 pkg_name = pkg_entry->name();
3139 } else {
3140 pkg_name = from_class_name;
3141 }
3142
3143 if (pkg_name != nullptr && loader_data != nullptr) {
3144
3145 // Find in class loader's package entry table.
3146 _package_entry = pkg_entry != nullptr ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
3147
3148 // If the package name is not found in the loader's package
3149 // entry table, it is an indication that the package has not
3150 // been defined. Consider it defined within the unnamed module.
3151 if (_package_entry == nullptr) {
3152
3153 if (!ModuleEntryTable::javabase_defined()) {
3154 // Before java.base is defined during bootstrapping, define all packages in
3155 // the java.base module. If a non-java.base package is erroneously placed
3156 // in the java.base module it will be caught later when java.base
3157 // is defined, by the ModuleEntryTable::verify_javabase_packages check.
3158 assert(ModuleEntryTable::javabase_moduleEntry() != nullptr, JAVA_BASE_NAME " module is null");
3159 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, ModuleEntryTable::javabase_moduleEntry());
3160 } else {
3161 assert(loader_data->unnamed_module() != nullptr, "unnamed module is null");
3162 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, loader_data->unnamed_module());
3163 }
3164
3165 // A package should have been successfully created
3166 DEBUG_ONLY(ResourceMark rm(THREAD));
3167 assert(_package_entry != nullptr, "Package entry for class %s not found, loader %s",
3168 name()->as_C_string(), loader_data->loader_name_and_id());
3169 }
3170
3171 if (log_is_enabled(Trace, module)) {
3172 ResourceMark rm(THREAD);
3173 ModuleEntry* m = _package_entry->module();
3174 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
3175 external_name(),
3176 pkg_name->as_C_string(),
3177 loader_data->loader_name_and_id(),
3178 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
3179 }
3180 } else {
3181 ResourceMark rm(THREAD);
3182 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
3183 external_name(),
3184 (loader_data != nullptr) ? loader_data->loader_name_and_id() : "null",
3185 UNNAMED_MODULE);
3186 }
3187 }
3188
3189 // Function set_classpath_index ensures that for a non-null _package_entry
3190 // of the InstanceKlass, the entry is in the boot loader's package entry table.
3191 // It then sets the classpath_index in the package entry record.
3192 //
3193 // The classpath_index field is used to find the entry on the boot loader class
3194 // path for packages with classes loaded by the boot loader from -Xbootclasspath/a
3195 // in an unnamed module. It is also used to indicate (for all packages whose
3196 // classes are loaded by the boot loader) that at least one of the package's
3197 // classes has been loaded.
3198 void InstanceKlass::set_classpath_index(s2 path_index) {
3199 if (_package_entry != nullptr) {
3200 DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
3201 assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
3202 assert(path_index != -1, "Unexpected classpath_index");
3203 _package_entry->set_classpath_index(path_index);
3204 }
3205 }
3206
3207 // different versions of is_same_class_package
3208
3209 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
3210 oop classloader1 = this->class_loader();
3211 PackageEntry* classpkg1 = this->package();
3212 if (class2->is_objArray_klass()) {
3213 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
3214 }
3215
3216 oop classloader2;
3217 PackageEntry* classpkg2;
3218 if (class2->is_instance_klass()) {
3219 classloader2 = class2->class_loader();
3220 classpkg2 = class2->package();
3221 } else {
3222 assert(class2->is_typeArray_klass(), "should be type array");
3223 classloader2 = nullptr;
3224 classpkg2 = nullptr;
3225 }
3226
3227 // Same package is determined by comparing class loader
3228 // and package entries. Both must be the same. This rule
3229 // applies even to classes that are defined in the unnamed
3230 // package, they still must have the same class loader.
3231 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
3232 return true;
3233 }
3234
3235 return false;
3236 }
3237
3238 // Return true if this class and other_class are in the same package. Class loader
3239 // and class name information is enough to determine a class's package.
3240 bool InstanceKlass::is_same_class_package(oop other_class_loader,
3241 const Symbol* other_class_name) const {
3242 if (class_loader() != other_class_loader) {
3243 return false;
3244 }
3245 if (name()->fast_compare(other_class_name) == 0) {
3246 return true;
3247 }
3248
3249 {
3250 ResourceMark rm;
3251
3252 bool bad_class_name = false;
3253 TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
3254 if (bad_class_name) {
3255 return false;
3256 }
3257 // Check that package_from_class_name() returns null, not "", if there is no package.
3258 assert(other_pkg == nullptr || other_pkg->utf8_length() > 0, "package name is empty string");
3259
3260 const Symbol* const this_package_name =
3261 this->package() != nullptr ? this->package()->name() : nullptr;
3262
3263 if (this_package_name == nullptr || other_pkg == nullptr) {
3264 // One of the two doesn't have a package. Only return true if the other
3265 // one also doesn't have a package.
3266 return this_package_name == other_pkg;
3267 }
3268
3269 // Check if package is identical
3270 return this_package_name->fast_compare(other_pkg) == 0;
3271 }
3272 }
3273
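// Slow path used when the class name contains non-ASCII bytes: convert the
// name to unicode and check for a leading "java/" prefix.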
3274 static bool is_prohibited_package_slow(Symbol* class_name) {
3275 // Caller has ResourceMark
3276 int length;
3277 jchar* unicode = class_name->as_unicode(length);
3278 return (length >= 5 &&
3279 unicode[0] == 'j' &&
3280 unicode[1] == 'a' &&
3281 unicode[2] == 'v' &&
3282 unicode[3] == 'a' &&
3283 unicode[4] == '/');
3284 }
3285
3286 // Only boot and platform class loaders can define classes in "java/" packages.
3287 void InstanceKlass::check_prohibited_package(Symbol* class_name,
3288 ClassLoaderData* loader_data,
3289 TRAPS) {
3290 if (!loader_data->is_boot_class_loader_data() &&
3291 !loader_data->is_platform_class_loader_data() &&
3292 class_name != nullptr && class_name->utf8_length() >= 5) {
3293 ResourceMark rm(THREAD);
3294 bool prohibited;
3295 const u1* base = class_name->base();
3296 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
3297 prohibited = is_prohibited_package_slow(class_name);
3298 } else {
3299 char* name = class_name->as_C_string();
3300 prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
3301 }
3302 if (prohibited) {
3303 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
3304 assert(pkg_name != nullptr, "Error in parsing package name starting with 'java/'");
3305 char* name = pkg_name->as_C_string();
3306 const char* class_loader_name = loader_data->loader_name_and_id();
3307 StringUtils::replace_no_expand(name, "/", ".");
3308 const char* msg_text1 = "Class loader (instance of): ";
3309 const char* msg_text2 = " tried to load prohibited package name: ";
3310 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
3311 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
3312 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
3313 THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
3314 }
3315 }
3316 return;
3317 }
3318
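// Look for an InnerClasses attribute entry that describes this class itself.
// If found, return true and set *ooff and *noff to the outer_class_info and
// inner_name constant pool indices of that entry.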
3319 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
3320 constantPoolHandle i_cp(THREAD, constants());
3321 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
3322 int ioff = iter.inner_class_info_index();
3323 if (ioff != 0) {
3324 // Check to see if the name matches the class we're looking for
3325 // before attempting to find the class.
3326 if (i_cp->klass_name_at_matches(this, ioff)) {
3327 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
3328 if (this == inner_klass) {
3329 *ooff = iter.outer_class_info_index();
3330 *noff = iter.inner_name_index();
3331 return true;
3332 }
3333 }
3334 }
3335 }
3336 return false;
3337 }
3338
3339 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
3340 InstanceKlass* outer_klass = nullptr;
3341 *inner_is_member = false;
3342 int ooff = 0, noff = 0;
3343 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
3344 if (has_inner_classes_attr) {
3345 constantPoolHandle i_cp(THREAD, constants());
3346 if (ooff != 0) {
3347 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
3348 if (!ok->is_instance_klass()) {
3349 // If the outer class is not an instance klass then it cannot have
3350 // declared any inner classes.
3351 ResourceMark rm(THREAD);
3352 // Names are all known to be < 64k so we know this formatted message is not excessively large.
3353 Exceptions::fthrow(
3354 THREAD_AND_LOCATION,
3355 vmSymbols::java_lang_IncompatibleClassChangeError(),
3356 "%s and %s disagree on InnerClasses attribute",
3357 ok->external_name(),
3358 external_name());
3359 return nullptr;
3360 }
3361 outer_klass = InstanceKlass::cast(ok);
3362 *inner_is_member = true;
3363 }
3364 if (nullptr == outer_klass) {
3365 // It may be a local class; try for that.
3366 int encl_method_class_idx = enclosing_method_class_index();
3367 if (encl_method_class_idx != 0) {
3368 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
3369 outer_klass = InstanceKlass::cast(ok);
3370 *inner_is_member = false;
3371 }
3372 }
3373 }
3374
3375 // If no enclosing class was found for this class, return null.
3376 if (nullptr == outer_klass) return nullptr;
3377
3378 // Throws an exception if the outer klass has not declared this klass as an inner klass.
3379 // We need evidence that each klass knows about the other, or else
3380 // the system could allow a spoof of an inner class to gain access rights.
3381 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
3382 return outer_klass;
3383 }
3384
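// Compute the modifier flags for this class: start from the class access flags,
// substitute the inner-class access flags if this class is listed as a member
// in its own InnerClasses attribute, and strip the ACC_SUPER bit.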
3385 u2 InstanceKlass::compute_modifier_flags() const {
3386 u2 access = access_flags().as_unsigned_short();
3387
3388 // But check if it happens to be a member class.
3389 InnerClassesIterator iter(this);
3390 for (; !iter.done(); iter.next()) {
3391 int ioff = iter.inner_class_info_index();
3392 // Inner class attribute can be zero, skip it.
3393 // Strange but true: JVM spec. allows null inner class refs.
3394 if (ioff == 0) continue;
3395
3396 // Only compare class names here, since we are looking
3397 // for the flags of this class itself.
3398 Symbol* inner_name = constants()->klass_name_at(ioff);
3399 if (name() == inner_name) {
3400 // This is really a member class.
3401 access = iter.inner_access_flags();
3402 break;
3403 }
3404 }
3405 // Remember to strip ACC_SUPER bit
3406 return (access & (~JVM_ACC_SUPER));
3407 }
3408
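// Map the class's current state onto the JVMTI class status bits
// (VERIFIED/PREPARED once linked, INITIALIZED, ERROR).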
3409 jint InstanceKlass::jvmti_class_status() const {
3410 jint result = 0;
3411
3412 if (is_linked()) {
3413 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3414 }
3415
3416 if (is_initialized()) {
3417 assert(is_linked(), "Class status is not consistent");
3418 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3419 }
3420 if (is_in_error_state()) {
3421 result |= JVMTI_CLASS_STATUS_ERROR;
3422 }
3423 return result;
3424 }
3425
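// Look up the itable entry for 'holder' and return the method at 'index'.
// Throws AbstractMethodError if the itable slot is empty, and
// IncompatibleClassChangeError if this class does not implement 'holder'.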
3426 Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
3427 bool implements_interface; // initialized by method_at_itable_or_null
3428 Method* m = method_at_itable_or_null(holder, index,
3429 implements_interface); // out parameter
3430 if (m != nullptr) {
3431 assert(implements_interface, "sanity");
3432 return m;
3433 } else if (implements_interface) {
3434 // Throw AbstractMethodError since corresponding itable slot is empty.
3435 THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
3436 } else {
3437 // If the interface isn't implemented by the receiver class,
3438 // the VM should throw IncompatibleClassChangeError.
3439 ResourceMark rm(THREAD);
3440 stringStream ss;
3441 bool same_module = (module() == holder->module());
3442 ss.print("Receiver class %s does not implement "
3443 "the interface %s defining the method to be called "
3444 "(%s%s%s)",
3445 external_name(), holder->external_name(),
3446 (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
3447 (same_module) ? "" : "; ",
3448 (same_module) ? "" : holder->class_in_module_of_loader());
3449 THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
3450 }
3451 }
3452
3453 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3454 klassItable itable(this);
3455 for (int i = 0; i < itable.size_offset_table(); i++) {
3456 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3457 if (offset_entry->interface_klass() == holder) {
3458 implements_interface = true;
3459 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3460 Method* m = ime[index].method();
3461 return m;
3462 }
3463 }
3464 implements_interface = false;
3465 return nullptr; // offset entry not found
3466 }
3467
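// Return the vtable index used to dispatch the given interface method on this
// class: check the default-method array first, then fall back to the miranda
// entries in the vtable.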
3468 int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
3469 assert(is_linked(), "required");
3470 assert(intf_method->method_holder()->is_interface(), "not an interface method");
3471 assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");
3472
3473 int vtable_index = Method::invalid_vtable_index;
3474 Symbol* name = intf_method->name();
3475 Symbol* signature = intf_method->signature();
3476
3477 // First check in default method array
3478 if (!intf_method->is_abstract() && default_methods() != nullptr) {
3479 int index = find_method_index(default_methods(),
3480 name, signature,
3481 Klass::OverpassLookupMode::find,
3482 Klass::StaticLookupMode::find,
3483 Klass::PrivateLookupMode::find);
3484 if (index >= 0) {
3485 vtable_index = default_vtable_indices()->at(index);
3486 }
3487 }
3488 if (vtable_index == Method::invalid_vtable_index) {
3489 // get vtable_index for miranda methods
3490 klassVtable vt = vtable();
3491 vtable_index = vt.index_of_miranda(name, signature);
3492 }
3493 return vtable_index;
3494 }
3495
3496 #if INCLUDE_JVMTI
3497 // Update default_methods for RedefineClasses() for methods that are
3498 // not yet in the vtable due to concurrent subclass definition and superinterface
3499 // redefinition.
3500 // Note: methods already in the vtable should have been updated via adjust_method_entries.
3501 void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
3502 // search the default_methods for uses of either obsolete or EMCP methods
3503 if (default_methods() != nullptr) {
3504 for (int index = 0; index < default_methods()->length(); index ++) {
3505 Method* old_method = default_methods()->at(index);
3506 if (old_method == nullptr || !old_method->is_old()) {
3507 continue; // skip uninteresting entries
3508 }
3509 assert(!old_method->is_deleted(), "default methods may not be deleted");
3510 Method* new_method = old_method->get_new_method();
3511 default_methods()->at_put(index, new_method);
3512
3513 if (log_is_enabled(Info, redefine, class, update)) {
3514 ResourceMark rm;
3515 if (!(*trace_name_printed)) {
3516 log_info(redefine, class, update)
3517 ("adjust: klassname=%s default methods from name=%s",
3518 external_name(), old_method->method_holder()->external_name());
3519 *trace_name_printed = true;
3520 }
3521 log_debug(redefine, class, update, vtables)
3522 ("default method update: %s(%s) ",
3523 new_method->name()->as_C_string(), new_method->signature()->as_C_string());
3524 }
3525 }
3526 }
3527 }
3528 #endif // INCLUDE_JVMTI
3529
3530 // On-stack replacement (OSR) support
3531 void InstanceKlass::add_osr_nmethod(nmethod* n) {
3532 assert_lock_strong(NMethodState_lock);
3533 #ifndef PRODUCT
3534 nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3535 assert(prev == nullptr || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
3536 "redundant OSR recompilation detected. memory leak in CodeCache!");
3537 #endif
3538 // only one compilation can be active
3539 assert(n->is_osr_method(), "wrong kind of nmethod");
3540 n->set_osr_link(osr_nmethods_head());
3541 set_osr_nmethods_head(n);
3542 // Raise the highest osr level if necessary
3543 n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));
3544
3545 // Get rid of the osr methods for the same bci that have lower levels.
3546 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3547 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3548 if (inv != nullptr && inv->is_in_use()) {
3549 inv->make_not_entrant(nmethod::InvalidationReason::OSR_INVALIDATION_OF_LOWER_LEVEL);
3550 }
3551 }
3552 }
3553
3554 // Remove osr nmethod from the list. Return true if found and removed.
3555 bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3556 // This is a short non-blocking critical region, so the no safepoint check is ok.
3557 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3558 assert(n->is_osr_method(), "wrong kind of nmethod");
3559 nmethod* last = nullptr;
3560 nmethod* cur = osr_nmethods_head();
3561 int max_level = CompLevel_none; // Find the max comp level excluding n
3562 Method* m = n->method();
3563 // Search for match
3564 bool found = false;
3565 while(cur != nullptr && cur != n) {
3566 if (m == cur->method()) {
3567 // Find max level before n
3568 max_level = MAX2(max_level, cur->comp_level());
3569 }
3570 last = cur;
3571 cur = cur->osr_link();
3572 }
3573 nmethod* next = nullptr;
3574 if (cur == n) {
3575 found = true;
3576 next = cur->osr_link();
3577 if (last == nullptr) {
3578 // Remove first element
3579 set_osr_nmethods_head(next);
3580 } else {
3581 last->set_osr_link(next);
3582 }
3583 }
3584 n->set_osr_link(nullptr);
3585 cur = next;
3586 while (cur != nullptr) {
3587 // Find max level after n
3588 if (m == cur->method()) {
3589 max_level = MAX2(max_level, cur->comp_level());
3590 }
3591 cur = cur->osr_link();
3592 }
3593 m->set_highest_osr_comp_level(max_level);
3594 return found;
3595 }
3596
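// Mark every OSR nmethod compiled for the given method for deoptimization
// (via the DeoptimizationScope) and return how many were marked.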
3597 int InstanceKlass::mark_osr_nmethods(DeoptimizationScope* deopt_scope, const Method* m) {
3598 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3599 nmethod* osr = osr_nmethods_head();
3600 int found = 0;
3601 while (osr != nullptr) {
3602 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3603 if (osr->method() == m) {
3604 deopt_scope->mark(osr);
3605 found++;
3606 }
3607 osr = osr->osr_link();
3608 }
3609 return found;
3610 }
3611
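// Find an OSR nmethod for the given method and bci (or any bci if
// InvocationEntryBci is passed). With match_level, require an exact
// compilation level; otherwise return the best nmethod whose level is at
// least comp_level, or null.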
3612 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3613 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3614 nmethod* osr = osr_nmethods_head();
3615 nmethod* best = nullptr;
3616 while (osr != nullptr) {
3617 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3618 // There can be a time when a c1 osr method exists but we are waiting
3619 // for a c2 version. When c2 completes its osr nmethod we will trash
3620 // the c1 version and only be able to find the c2 version. However,
3621 // while we overflow in the c1 code at back branches we don't want to
3622 // try and switch to the same code as we are already running.
3623
3624 if (osr->method() == m &&
3625 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3626 if (match_level) {
3627 if (osr->comp_level() == comp_level) {
3628 // Found a match - return it.
3629 return osr;
3630 }
3631 } else {
3632 if (best == nullptr || (osr->comp_level() > best->comp_level())) {
3633 if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
3634 // Found the best possible - return it.
3635 return osr;
3636 }
3637 best = osr;
3638 }
3639 }
3640 }
3641 osr = osr->osr_link();
3642 }
3643
3644 assert(match_level == false || best == nullptr, "shouldn't pick up anything if match_level is set");
3645 if (best != nullptr && best->comp_level() >= comp_level) {
3646 return best;
3647 }
3648 return nullptr;
3649 }
3650
3651 // -----------------------------------------------------------------------------------------------------
3652 // Printing
3653
3654 #define BULLET " - "
3655
3656 static const char* state_names[] = {
3657 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3658 };
3659
3660 static void print_vtable(intptr_t* start, int len, outputStream* st) {
3661 for (int i = 0; i < len; i++) {
3662 intptr_t e = start[i];
3663 st->print("%d : " INTPTR_FORMAT, i, e);
3664 if (MetaspaceObj::is_valid((Metadata*)e)) {
3665 st->print(" ");
3666 ((Metadata*)e)->print_value_on(st);
3667 }
3668 st->cr();
3669 }
3670 }
3671
3672 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3673 return print_vtable(reinterpret_cast<intptr_t*>(start), len, st);
3674 }
3675
3676 const char* InstanceKlass::init_state_name() const {
3677 return state_names[init_state()];
3678 }
3679
3680 void InstanceKlass::print_on(outputStream* st) const {
3681 assert(is_klass(), "must be klass");
3682 Klass::print_on(st);
3683
3684 st->print(BULLET"instance size: %d", size_helper()); st->cr();
3685 st->print(BULLET"klass size: %d", size()); st->cr();
3686 st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
3687 st->print(BULLET"flags: "); _misc_flags.print_on(st); st->cr();
3688 st->print(BULLET"state: "); st->print_cr("%s", init_state_name());
3689 st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
3690 st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3691 st->print(BULLET"sub: ");
3692 Klass* sub = subklass();
3693 int n;
3694 for (n = 0; sub != nullptr; n++, sub = sub->next_sibling()) {
3695 if (n < MaxSubklassPrintSize) {
3696 sub->print_value_on(st);
3697 st->print(" ");
3698 }
3699 }
3700 if (n >= MaxSubklassPrintSize) st->print("(%zd more klasses...)", n - MaxSubklassPrintSize);
3701 st->cr();
3702
3703 if (is_interface()) {
3704 st->print_cr(BULLET"nof implementors: %d", nof_implementors());
3705 if (nof_implementors() == 1) {
3706 st->print_cr(BULLET"implementor: ");
3707 st->print(" ");
3708 implementor()->print_value_on(st);
3709 st->cr();
3710 }
3711 }
3712
3713 st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3714 st->print(BULLET"methods: "); methods()->print_value_on(st); st->cr();
3715 if (Verbose || WizardMode) {
3716 Array<Method*>* method_array = methods();
3717 for (int i = 0; i < method_array->length(); i++) {
3718 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3719 }
3720 }
3721 st->print(BULLET"method ordering: "); method_ordering()->print_value_on(st); st->cr();
3722 if (default_methods() != nullptr) {
3723 st->print(BULLET"default_methods: "); default_methods()->print_value_on(st); st->cr();
3724 if (Verbose) {
3725 Array<Method*>* method_array = default_methods();
3726 for (int i = 0; i < method_array->length(); i++) {
3727 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3728 }
3729 }
3730 }
3731 print_on_maybe_null(st, BULLET"default vtable indices: ", default_vtable_indices());
3732 st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
3733 st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3734
3735 st->print(BULLET"secondary supers: "); secondary_supers()->print_value_on(st); st->cr();
3736
3737 st->print(BULLET"hash_slot: %d", hash_slot()); st->cr();
3738 st->print(BULLET"secondary bitmap: " UINTX_FORMAT_X_0, _secondary_supers_bitmap); st->cr();
3739
3740 if (secondary_supers() != nullptr) {
3741 if (Verbose) {
3742 bool is_hashed = (_secondary_supers_bitmap != SECONDARY_SUPERS_BITMAP_FULL);
3743 st->print_cr(BULLET"---- secondary supers (%d words):", _secondary_supers->length());
3744 for (int i = 0; i < _secondary_supers->length(); i++) {
3745 ResourceMark rm; // for external_name()
3746 Klass* secondary_super = _secondary_supers->at(i);
3747 st->print(BULLET"%2d:", i);
3748 if (is_hashed) {
3749 int home_slot = compute_home_slot(secondary_super, _secondary_supers_bitmap);
3750 int distance = (i - home_slot) & SECONDARY_SUPERS_TABLE_MASK;
3751 st->print(" dist:%02d:", distance);
3752 }
3753 st->print_cr(" %p %s", secondary_super, secondary_super->external_name());
3754 }
3755 }
3756 }
3757 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
3758
3759 print_on_maybe_null(st, BULLET"class loader data: ", class_loader_data());
3760 print_on_maybe_null(st, BULLET"source file: ", source_file_name());
3761 if (source_debug_extension() != nullptr) {
3762 st->print(BULLET"source debug extension: ");
3763 st->print("%s", source_debug_extension());
3764 st->cr();
3765 }
3766 print_on_maybe_null(st, BULLET"class annotations: ", class_annotations());
3767 print_on_maybe_null(st, BULLET"class type annotations: ", class_type_annotations());
3768 print_on_maybe_null(st, BULLET"field annotations: ", fields_annotations());
3769 print_on_maybe_null(st, BULLET"field type annotations: ", fields_type_annotations());
3770 {
3771 bool have_pv = false;
3772 // previous versions are linked together through the InstanceKlass
3773 for (InstanceKlass* pv_node = previous_versions();
3774 pv_node != nullptr;
3775 pv_node = pv_node->previous_versions()) {
3776 if (!have_pv)
3777 st->print(BULLET"previous version: ");
3778 have_pv = true;
3779 pv_node->constants()->print_value_on(st);
3780 }
3781 if (have_pv) st->cr();
3782 }
3783
3784 print_on_maybe_null(st, BULLET"generic signature: ", generic_signature());
3785 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
3786 st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
3787 print_on_maybe_null(st, BULLET"record components: ", record_components());
3788 st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
3789 if (java_mirror() != nullptr) {
3790 st->print(BULLET"java mirror: ");
3791 java_mirror()->print_value_on(st);
3792 st->cr();
3793 } else {
3794 st->print_cr(BULLET"java mirror: null");
3795 }
3796 st->print(BULLET"vtable length %d (start addr: " PTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
3797 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
3798 st->print(BULLET"itable length %d (start addr: " PTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
3799 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st);
3800 st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
3801
3802 FieldPrinter print_static_field(st);
3803 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
3804 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
3805 FieldPrinter print_nonstatic_field(st);
3806 InstanceKlass* ik = const_cast<InstanceKlass*>(this);
3807 ik->print_nonstatic_fields(&print_nonstatic_field);
3808
3809 st->print(BULLET"non-static oop maps (%d entries): ", nonstatic_oop_map_count());
3810 OopMapBlock* map = start_of_nonstatic_oop_maps();
3811 OopMapBlock* end_map = map + nonstatic_oop_map_count();
3812 while (map < end_map) {
3813 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
3814 map++;
3815 }
3816 st->cr();
3817
3818 if (fieldinfo_search_table() != nullptr) {
3819 st->print_cr(BULLET"---- field info search table:");
3820 FieldInfoStream::print_search_table(st, _constants, _fieldinfo_stream, _fieldinfo_search_table);
3821 }
3822 }
3823
3824 void InstanceKlass::print_value_on(outputStream* st) const {
3825 assert(is_klass(), "must be klass");
3826 if (Verbose || WizardMode) access_flags().print_on(st);
3827 name()->print_value_on(st);
3828 }
3829
3830 void FieldPrinter::do_field(fieldDescriptor* fd) {
3831 _st->print(BULLET);
3832 if (_obj == nullptr) {
3833 fd->print_on(_st);
3834 _st->cr();
3835 } else {
3836 fd->print_on_for(_st, _obj);
3837 _st->cr();
3838 }
3839 }
3840
3841
3842 void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
3843 Klass::oop_print_on(obj, st);
3844
3845 if (this == vmClasses::String_klass()) {
3846 typeArrayOop value = java_lang_String::value(obj);
3847 juint length = java_lang_String::length(obj);
3848 if (value != nullptr &&
3849 value->is_typeArray() &&
3850 length <= (juint) value->length()) {
3851 st->print(BULLET"string: ");
3852 java_lang_String::print(obj, st);
3853 st->cr();
3854 }
3855 }
3856
3857 st->print_cr(BULLET"---- fields (total size %zu words):", oop_size(obj, obj->mark()));
3858 FieldPrinter print_field(st, obj);
3859 print_nonstatic_fields(&print_field);
3860
3861 if (this == vmClasses::Class_klass()) {
3862 st->print(BULLET"signature: ");
3863 java_lang_Class::print_signature(obj, st);
3864 st->cr();
3865 Klass* real_klass = java_lang_Class::as_Klass(obj);
3866 if (real_klass != nullptr && real_klass->is_instance_klass()) {
3867 st->print_cr(BULLET"---- static fields (%d):", java_lang_Class::static_oop_field_count(obj));
3868 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
3869 }
3870 } else if (this == vmClasses::MethodType_klass()) {
3871 st->print(BULLET"signature: ");
3872 java_lang_invoke_MethodType::print_signature(obj, st);
3873 st->cr();
3874 }
3875 }
3876
3877 #ifndef PRODUCT
3878
3879 bool InstanceKlass::verify_itable_index(int i) {
3880 int method_count = klassItable::method_count_for_interface(this);
3881 assert(i >= 0 && i < method_count, "index out of bounds");
3882 return true;
3883 }
3884
3885 #endif //PRODUCT
3886
3887 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
3888 st->print("a ");
3889 name()->print_value_on(st);
3890 obj->print_address_on(st);
3891 if (this == vmClasses::String_klass()
3892 && java_lang_String::value(obj) != nullptr) {
3893 ResourceMark rm;
3894 int len = java_lang_String::length(obj);
3895 int plen = (len < 24 ? len : 12);
3896 char* str = java_lang_String::as_utf8_string(obj, 0, plen);
3897 st->print(" = \"%s\"", str);
3898 if (len > plen)
3899 st->print("...[%d]", len);
3900 } else if (this == vmClasses::Class_klass()) {
3901 Klass* k = java_lang_Class::as_Klass(obj);
3902 st->print(" = ");
3903 if (k != nullptr) {
3904 k->print_value_on(st);
3905 } else {
3906 const char* tname = type2name(java_lang_Class::primitive_type(obj));
3907 st->print("%s", tname ? tname : "type?");
3908 }
3909 } else if (this == vmClasses::MethodType_klass()) {
3910 st->print(" = ");
3911 java_lang_invoke_MethodType::print_signature(obj, st);
3912 } else if (java_lang_boxing_object::is_instance(obj)) {
3913 st->print(" = ");
3914 java_lang_boxing_object::print(obj, st);
3915 } else if (this == vmClasses::LambdaForm_klass()) {
3916 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
3917 if (vmentry != nullptr) {
3918 st->print(" => ");
3919 vmentry->print_value_on(st);
3920 }
3921 } else if (this == vmClasses::MemberName_klass()) {
3922 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
3923 if (vmtarget != nullptr) {
3924 st->print(" = ");
3925 vmtarget->print_value_on(st);
3926 } else {
3927 oop clazz = java_lang_invoke_MemberName::clazz(obj);
3928 oop name = java_lang_invoke_MemberName::name(obj);
3929 if (clazz != nullptr) {
3930 clazz->print_value_on(st);
3931 } else {
3932 st->print("null");
3933 }
3934 st->print(".");
3935 if (name != nullptr) {
3936 name->print_value_on(st);
3937 } else {
3938 st->print("null");
3939 }
3940 }
3941 }
3942 }
3943
3944 const char* InstanceKlass::internal_name() const {
3945 return external_name();
3946 }
3947
3948 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
3949 const ModuleEntry* module_entry,
3950 const ClassFileStream* cfs) const {
3951
3952 if (ClassListWriter::is_enabled()) {
3953 ClassListWriter::write(this, cfs);
3954 }
3955
3956 print_class_load_helper(loader_data, module_entry, cfs);
3957 print_class_load_cause_logging();
3958 }
3959
3960 void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data,
3961 const ModuleEntry* module_entry,
3962 const ClassFileStream* cfs) const {
3963
3964 if (!log_is_enabled(Info, class, load)) {
3965 return;
3966 }
3967
3968 ResourceMark rm;
3969 LogMessage(class, load) msg;
3970 stringStream info_stream;
3971
3972 // Name and class hierarchy info
3973 info_stream.print("%s", external_name());
3974
3975 // Source
3976 if (cfs != nullptr) {
3977 if (cfs->source() != nullptr) {
3978 const char* module_name = (module_entry->name() == nullptr) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
3979 if (module_name != nullptr) {
3980 // When the boot loader created the stream, it didn't know the module name
3981 // yet. Let's format it now.
3982 if (cfs->from_boot_loader_modules_image()) {
3983 info_stream.print(" source: jrt:/%s", module_name);
3984 } else {
3985 info_stream.print(" source: %s", cfs->source());
3986 }
3987 } else {
3988 info_stream.print(" source: %s", cfs->source());
3989 }
3990 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
3991 Thread* current = Thread::current();
3992 Klass* caller = current->is_Java_thread() ?
3993 JavaThread::cast(current)->security_get_caller_class(1):
3994 nullptr;
3995 // caller can be null, for example, during a JVMTI VM_Init hook
3996 if (caller != nullptr) {
3997 info_stream.print(" source: instance of %s", caller->external_name());
3998 } else {
3999 // source is unknown
4000 }
4001 } else {
4002 oop class_loader = loader_data->class_loader();
4003 info_stream.print(" source: %s", class_loader->klass()->external_name());
4004 }
4005 } else {
4006 assert(this->in_aot_cache(), "must be");
4007 if (AOTMetaspace::in_aot_cache_dynamic_region((void*)this)) {
4008 info_stream.print(" source: shared objects file (top)");
4009 } else {
4010 info_stream.print(" source: shared objects file");
4011 }
4012 }
4013
4014 msg.info("%s", info_stream.as_string());
4015
4016 if (log_is_enabled(Debug, class, load)) {
4017 stringStream debug_stream;
4018
4019 // Class hierarchy info
4020 debug_stream.print(" klass: " PTR_FORMAT " super: " PTR_FORMAT,
4021 p2i(this), p2i(super()));
4022
4023 // Interfaces
4024 if (local_interfaces() != nullptr && local_interfaces()->length() > 0) {
4025 debug_stream.print(" interfaces:");
4026 int length = local_interfaces()->length();
4027 for (int i = 0; i < length; i++) {
4028 debug_stream.print(" " PTR_FORMAT,
4029 p2i(local_interfaces()->at(i)));
4030 }
4031 }
4032
4033 // Class loader
4034 debug_stream.print(" loader: [");
4035 loader_data->print_value_on(&debug_stream);
4036 debug_stream.print("]");
4037
4038 // Classfile checksum
4039 if (cfs) {
4040 debug_stream.print(" bytes: %d checksum: %08x",
4041 cfs->length(),
4042 ClassLoader::crc32(0, (const char*)cfs->buffer(),
4043 cfs->length()));
4044 }
4045
4046 msg.debug("%s", debug_stream.as_string());
4047 }
4048 }
4049
4050 void InstanceKlass::print_class_load_cause_logging() const {
4051 bool log_cause_native = log_is_enabled(Info, class, load, cause, native);
4052 if (log_cause_native || log_is_enabled(Info, class, load, cause)) {
4053 JavaThread* current = JavaThread::current();
4054 ResourceMark rm(current);
4055 const char* name = external_name();
4056
4057 if (LogClassLoadingCauseFor == nullptr ||
4058 (strcmp("*", LogClassLoadingCauseFor) != 0 &&
4059 strstr(name, LogClassLoadingCauseFor) == nullptr)) {
4060 return;
4061 }
4062
4063 // Log Java stack first
4064 {
4065 LogMessage(class, load, cause) msg;
4066 NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
4067
4068 info_stream.print_cr("Java stack when loading %s:", name);
4069 current->print_stack_on(&info_stream);
4070 }
4071
4072 // Log native stack second
4073 if (log_cause_native) {
4074 // Log to string first so that lines can be indented
4075 stringStream stack_stream;
4076 char buf[O_BUFLEN];
4077 address lastpc = nullptr;
4078 NativeStackPrinter nsp(current);
4079 nsp.print_stack(&stack_stream, buf, sizeof(buf), lastpc,
4080 true /* print_source_info */, -1 /* max stack */);
4081
4082 LogMessage(class, load, cause, native) msg;
4083 NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
4084 info_stream.print_cr("Native stack when loading %s:", name);
4085
4086 // Print each native stack line to the log
4087 int size = (int) stack_stream.size();
4088 char* stack = stack_stream.as_string();
4089 char* stack_end = stack + size;
4090 char* line_start = stack;
4091 for (char* p = stack; p < stack_end; p++) {
4092 if (*p == '\n') {
4093 *p = '\0';
4094 info_stream.print_cr("\t%s", line_start);
4095 line_start = p + 1;
4096 }
4097 }
4098 if (line_start < stack_end) {
4099 info_stream.print_cr("\t%s", line_start);
4100 }
4101 }
4102 }
4103 }
4104
4105 // Verification
4106
4107 class VerifyFieldClosure: public BasicOopIterateClosure {
4108 protected:
4109 template <class T> void do_oop_work(T* p) {
4110 oop obj = RawAccess<>::oop_load(p);
4111 if (!oopDesc::is_oop_or_null(obj)) {
4112 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
4113 Universe::print_on(tty);
4114 guarantee(false, "boom");
4115 }
4116 }
4117 public:
4118 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
4119 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
4120 };
4121
4122 void InstanceKlass::verify_on(outputStream* st) {
4123 #ifndef PRODUCT
4124 // Avoid redundant verifies; this really should be in product.
4125 if (_verify_count == Universe::verify_count()) return;
4126 _verify_count = Universe::verify_count();
4127 #endif
4128
4129 // Verify Klass
4130 Klass::verify_on(st);
4131
4132 // Verify that klass is present in ClassLoaderData
4133 guarantee(class_loader_data()->contains_klass(this),
4134 "this class isn't found in class loader data");
4135
4136 // Verify vtables
4137 if (is_linked()) {
4138 // $$$ This used to be done only for m/s collections. Doing it
4139 // always seemed a valid generalization. (DLD -- 6/00)
4140 vtable().verify(st);
4141 }
4142
4143 // Verify first subklass
4144 if (subklass() != nullptr) {
4145 guarantee(subklass()->is_klass(), "should be klass");
4146 }
4147
4148 // Verify siblings
4149 Klass* super = this->super();
4150 Klass* sib = next_sibling();
4151 if (sib != nullptr) {
4152 if (sib == this) {
4153 fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
4154 }
4155
4156 guarantee(sib->is_klass(), "should be klass");
4157 guarantee(sib->super() == super, "siblings should have same superklass");
4158 }
4159
4160 // Verify local interfaces
4161 if (local_interfaces()) {
4162 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
4163 for (int j = 0; j < local_interfaces->length(); j++) {
4164 InstanceKlass* e = local_interfaces->at(j);
4165 guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
4166 }
4167 }
4168
4169 // Verify transitive interfaces
4170 if (transitive_interfaces() != nullptr) {
4171 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
4172 for (int j = 0; j < transitive_interfaces->length(); j++) {
4173 InstanceKlass* e = transitive_interfaces->at(j);
4174 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
4175 }
4176 }
4177
4178 // Verify methods
4179 if (methods() != nullptr) {
4180 Array<Method*>* methods = this->methods();
4181 for (int j = 0; j < methods->length(); j++) {
4182 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4183 }
4184 for (int j = 0; j < methods->length() - 1; j++) {
4185 Method* m1 = methods->at(j);
4186 Method* m2 = methods->at(j + 1);
4187 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4188 }
4189 }
4190
4191 // Verify method ordering
4192 if (method_ordering() != nullptr) {
4193 Array<int>* method_ordering = this->method_ordering();
4194 int length = method_ordering->length();
4195 if (JvmtiExport::can_maintain_original_method_order() ||
4196 ((CDSConfig::is_using_archive() || CDSConfig::is_dumping_archive()) && length != 0)) {
4197 guarantee(length == methods()->length(), "invalid method ordering length");
4198 jlong sum = 0;
4199 for (int j = 0; j < length; j++) {
4200 int original_index = method_ordering->at(j);
4201 guarantee(original_index >= 0, "invalid method ordering index");
4202 guarantee(original_index < length, "invalid method ordering index");
4203 sum += original_index;
4204 }
4205 // Verify sum of indices 0,1,...,length-1
4206 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
4207 } else {
4208 guarantee(length == 0, "invalid method ordering length");
4209 }
4210 }
4211
4212 // Verify default methods
4213 if (default_methods() != nullptr) {
4214 Array<Method*>* methods = this->default_methods();
4215 for (int j = 0; j < methods->length(); j++) {
4216 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4217 }
4218 for (int j = 0; j < methods->length() - 1; j++) {
4219 Method* m1 = methods->at(j);
4220 Method* m2 = methods->at(j + 1);
4221 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4222 }
4223 }
4224
4225 // Verify JNI static field identifiers
4226 if (jni_ids() != nullptr) {
4227 jni_ids()->verify(this);
4228 }
4229
4230 // Verify other fields
4231 if (constants() != nullptr) {
4232 guarantee(constants()->is_constantPool(), "should be constant pool");
4233 }
4234 }
4235
4236 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
4237 Klass::oop_verify_on(obj, st);
4238 VerifyFieldClosure blk;
4239 obj->oop_iterate(&blk);
4240 }
4241
4242 // JNIid class for jfieldIDs only
4243 // Note to reviewers:
4244 // These JNI functions are just moved over to column 1 and not changed
4245 // in the compressed oops workspace.
4246 JNIid::JNIid(InstanceKlass* holder, int offset, JNIid* next) {
4247 _holder = holder;
4248 _offset = offset;
4249 _next = next;
4250 DEBUG_ONLY(_is_static_field_id = false;)
4251 }
4252
4253 JNIid* JNIid::find(int offset) {
4254 JNIid* current = this;
4255 while (current != nullptr) {
4256 if (current->offset() == offset) return current;
4257 current = current->next();
4258 }
4259 return nullptr;
4260 }
4261
4262 void JNIid::deallocate(JNIid* current) {
4263 while (current != nullptr) {
4264 JNIid* next = current->next();
4265 delete current;
4266 current = next;
4267 }
4268 }
4269
4270 void JNIid::verify(InstanceKlass* holder) {
4271 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
4272 int end_field_offset;
4273 end_field_offset = first_field_offset + (holder->static_field_size() * wordSize);
4274
4275 JNIid* current = this;
4276 while (current != nullptr) {
4277 guarantee(current->holder() == holder, "Invalid klass in JNIid");
4278 #ifdef ASSERT
4279 int o = current->offset();
4280 if (current->is_static_field_id()) {
4281 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
4282 }
4283 #endif
4284 current = current->next();
4285 }
4286 }
4287
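// Transition the class initialization state. In debug builds, verify that the
// state only moves forward (AOT-cached classes may re-store the same state,
// and any class may be reset to 'allocated').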
4288 void InstanceKlass::set_init_state(ClassState state) {
4289 #ifdef ASSERT
4290 bool good_state = in_aot_cache() ? (_init_state <= state)
4291 : (_init_state < state);
4292 assert(good_state || state == allocated, "illegal state transition");
4293 #endif
4294 assert(_init_thread == nullptr, "should be cleared before state change");
4295 AtomicAccess::release_store(&_init_state, state);
4296 }
4297
4298 #if INCLUDE_JVMTI
4299
4300 // RedefineClasses() support for previous versions
4301
4302 // Globally, there is at least one previous version of a class to walk
4303 // during class unloading, which is saved because old methods in the class
4304 // are still running. Otherwise the previous version list is cleaned up.
4305 bool InstanceKlass::_should_clean_previous_versions = false;
4306
4307 // Returns true if there are previous versions of a class for class
4308 // unloading only. Also resets the flag to false. purge_previous_version_list()
4309 // will set the flag to true if there are any left, i.e., if there's any
4310 // work to do for next time. This is to avoid the expensive code cache
4311 // walk in CLDG::clean_deallocate_lists().
4312 bool InstanceKlass::should_clean_previous_versions_and_reset() {
4313 bool ret = _should_clean_previous_versions;
4314 log_trace(redefine, class, iklass, purge)("Class unloading: should_clean_previous_versions = %s",
4315 ret ? "true" : "false");
4316 _should_clean_previous_versions = false;
4317 return ret;
4318 }
4319
4320 // This nulls out the jmethodID for all obsolete methods in the previous version of the 'klass'.
4321 // These obsolete methods only exist in the previous version and we're about to delete the memory for them.
4322 // The jmethodIDs for these are deallocated when we unload the class, so this doesn't remove them from the table.
4323 void InstanceKlass::clear_obsolete_jmethod_ids(InstanceKlass* klass) {
4324 Array<Method*>* method_refs = klass->methods();
4325 for (int k = 0; k < method_refs->length(); k++) {
4326 Method* method = method_refs->at(k);
4327 // Only need to clear obsolete methods.
4328 if (method != nullptr && method->is_obsolete()) {
4329 method->clear_jmethod_id();
4330 }
4331 }
4332 }
4333
4334 // Purge previous versions before adding new previous versions of the class and
4335 // during class unloading.
4336 void InstanceKlass::purge_previous_version_list() {
4337 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
4338 assert(has_been_redefined(), "Should only be called for main class");
4339
4340 // Quick exit.
4341 if (previous_versions() == nullptr) {
4342 return;
4343 }
4344
4345 // This klass has previous versions so see what we can cleanup
4346 // while it is safe to do so.
4347
4348 int deleted_count = 0; // leave debugging breadcrumbs
4349 int live_count = 0;
4350 ClassLoaderData* loader_data = class_loader_data();
4351 assert(loader_data != nullptr, "should never be null");
4352
4353 ResourceMark rm;
4354 log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());
4355
4356 // previous versions are linked together through the InstanceKlass
4357 InstanceKlass* pv_node = previous_versions();
4358 InstanceKlass* last = this;
4359 int version = 0;
4360
4361 // check the previous versions list
4362 for (; pv_node != nullptr; ) {
4363
4364 ConstantPool* pvcp = pv_node->constants();
4365 assert(pvcp != nullptr, "cp ref was unexpectedly cleared");
4366
4367 if (!pvcp->on_stack()) {
4368 // If the constant pool isn't on stack, none of the methods
4369 // are executing. Unlink this previous_version.
4370 // The previous version InstanceKlass is on the ClassLoaderData deallocate list
4371 // so will be deallocated during the next phase of class unloading.
4372 log_trace(redefine, class, iklass, purge)
4373 ("previous version " PTR_FORMAT " is dead.", p2i(pv_node));
4374 // Unlink from previous version list.
4375 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
4376 InstanceKlass* next = pv_node->previous_versions();
4377 clear_obsolete_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class
4378 pv_node->link_previous_versions(nullptr); // point next to null
4379 last->link_previous_versions(next);
4380 // Delete this node directly. Nothing is referring to it and we don't
4381 // want it to increase the counter for metadata to delete in CLDG.
4382 MetadataFactory::free_metadata(loader_data, pv_node);
4383 pv_node = next;
4384 deleted_count++;
4385 version++;
4386 continue;
4387 } else {
4388 assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder");
4389 guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
4390 live_count++;
4391 if (pvcp->in_aot_cache()) {
4392 // Shared previous versions can never be removed so no cleaning is needed.
4393 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node));
4394 } else {
4395 // Previous version alive, set that clean is needed for next time.
4396 _should_clean_previous_versions = true;
4397 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is alive", p2i(pv_node));
4398 }
4399 }
4400
4401 // next previous version
4402 last = pv_node;
4403 pv_node = pv_node->previous_versions();
4404 version++;
4405 }
4406 log_trace(redefine, class, iklass, purge)
4407 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
4408 }
4409
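// If a redefinition produced a mix of EMCP and obsolete methods, walk the
// previous-version chain and mark any still-EMCP copies of the newly obsolete
// methods as obsolete as well.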
4410 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
4411 int emcp_method_count) {
4412 int obsolete_method_count = old_methods->length() - emcp_method_count;
4413
4414 if (emcp_method_count != 0 && obsolete_method_count != 0 &&
4415 _previous_versions != nullptr) {
4416 // We have a mix of obsolete and EMCP methods so we have to
4417 // clear out any matching EMCP method entries the hard way.
4418 int local_count = 0;
4419 for (int i = 0; i < old_methods->length(); i++) {
4420 Method* old_method = old_methods->at(i);
4421 if (old_method->is_obsolete()) {
4422 // only obsolete methods are interesting
4423 Symbol* m_name = old_method->name();
4424 Symbol* m_signature = old_method->signature();
4425
4426 // previous versions are linked together through the InstanceKlass
4427 int j = 0;
4428 for (InstanceKlass* prev_version = _previous_versions;
4429 prev_version != nullptr;
4430 prev_version = prev_version->previous_versions(), j++) {
4431
4432 Array<Method*>* method_refs = prev_version->methods();
4433 for (int k = 0; k < method_refs->length(); k++) {
4434 Method* method = method_refs->at(k);
4435
4436 if (!method->is_obsolete() &&
4437 method->name() == m_name &&
4438 method->signature() == m_signature) {
4439 // The current RedefineClasses() call has made all EMCP
4440 // versions of this method obsolete so mark it as obsolete
4441 log_trace(redefine, class, iklass, add)
4442 ("%s(%s): flush obsolete method @%d in version @%d",
4443 m_name->as_C_string(), m_signature->as_C_string(), k, j);
4444
4445 method->set_is_obsolete();
4446 break;
4447 }
4448 }
4449
4450 // The previous loop may not find a matching EMCP method, but
4451 // that doesn't mean that we can optimize and not go any
4452 // further back in the PreviousVersion generations. The EMCP
4453 // method for this generation could have already been made obsolete,
4454 // but there still may be an older EMCP method that has not
4455 // been made obsolete.
4456 }
4457
4458 if (++local_count >= obsolete_method_count) {
4459 // no more obsolete methods so bail out now
4460 break;
4461 }
4462 }
4463 }
4464 }
4465 }
4466
4467 // Save the scratch_class as the previous version if any of the methods are running.
4468 // The previous_versions are used to set breakpoints in EMCP methods and they are
4469 // also used to clean MethodData links to redefined methods that are no longer running.
4470 void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
4471 int emcp_method_count) {
4472 assert(Thread::current()->is_VM_thread(),
4473 "only VMThread can add previous versions");
4474
4475 ResourceMark rm;
4476 log_trace(redefine, class, iklass, add)
4477 ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);
4478
4479 // Clean out old previous versions for this class
4480 purge_previous_version_list();
4481
4482 // Mark newly obsolete methods in remaining previous versions. An EMCP method from
4483 // a previous redefinition may be made obsolete by this redefinition.
4484 Array<Method*>* old_methods = scratch_class->methods();
4485 mark_newly_obsolete_methods(old_methods, emcp_method_count);
4486
  // If the constant pool of this previous version of the class is not
  // marked as being on the stack, then none of its methods are running,
  // so we don't need to keep it as a previous version.
4491 ConstantPool* cp_ref = scratch_class->constants();
4492 if (!cp_ref->on_stack()) {
4493 log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
4494 scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
4495 return;
4496 }
4497
  // Add the previous version if any methods are still running, or if this is
  // a shared class, which should never be removed.
4500 assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version");
4501 scratch_class->link_previous_versions(previous_versions());
4502 link_previous_versions(scratch_class);
4503 if (cp_ref->in_aot_cache()) {
    log_trace(redefine, class, iklass, add)("scratch class added; class is shared");
  } else {
    // We only set _should_clean_previous_versions for non-shared classes;
    // it is processed during class unloading.
    _should_clean_previous_versions = true;
    log_trace(redefine, class, iklass, add)("scratch class added; one of its methods is on_stack.");
4510 }
4511 } // end add_previous_version()
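
// Usage sketch (illustrative; see jvmtiRedefineClasses.cpp for the actual caller):
// after the redefinition pass has counted the EMCP methods of the old class version,
// it hands the old metadata over roughly like
//
//   // the_class:     the InstanceKlass being redefined
//   // scratch_class: the InstanceKlass wrapping the old version's metadata
//   the_class->add_previous_version(scratch_class, emcp_method_count);
//
// add_previous_version() then either links the old metadata into the previous-version
// list (methods still running, or a shared class that should never be removed) or
// queues the scratch class for deallocation.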
4512
4513 #endif // INCLUDE_JVMTI
4514
4515 Method* InstanceKlass::method_with_idnum(int idnum) const {
4516 Method* m = nullptr;
4517 if (idnum < methods()->length()) {
4518 m = methods()->at(idnum);
4519 }
4520 if (m == nullptr || m->method_idnum() != idnum) {
4521 for (int index = 0; index < methods()->length(); ++index) {
4522 m = methods()->at(index);
4523 if (m->method_idnum() == idnum) {
4524 return m;
4525 }
4526 }
4527 // None found, return null for the caller to handle.
4528 return nullptr;
4529 }
4530 return m;
4531 }
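
// Usage sketch (illustrative): callers typically hold on to a method's idnum rather
// than a raw Method*, because redefinition can replace the methods() array, e.g.
//
//   int idnum = method->method_idnum();          // index assigned to the method
//   ...
//   Method* current = ik->method_with_idnum(idnum);
//   if (current == nullptr) {
//     // no method with that idnum in this version; caller must handle it
//   }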
4532
4533
4534 Method* InstanceKlass::method_with_orig_idnum(int idnum) const {
4535 if (idnum >= methods()->length()) {
4536 return nullptr;
4537 }
4538 Method* m = methods()->at(idnum);
4539 if (m != nullptr && m->orig_method_idnum() == idnum) {
4540 return m;
4541 }
  // For an obsolete method the current idnum does not match the original idnum,
  // so fall back to a linear search over all methods.
4543 for (int index = 0; index < methods()->length(); ++index) {
4544 m = methods()->at(index);
4545 if (m->orig_method_idnum() == idnum) {
4546 return m;
4547 }
4548 }
4549 // None found, return null for the caller to handle.
4550 return nullptr;
4551 }
4552
4553
4554 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) const {
4555 const InstanceKlass* holder = get_klass_version(version);
4556 if (holder == nullptr) {
    return nullptr; // That version of the klass is gone; no method can be found.
4558 }
4559 return holder->method_with_orig_idnum(idnum);
4560 }
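
// Illustrative note: get_klass_version(version) resolves an older class version from
// the previous-version list, so this overload lets JVMTI find a method by its
// original idnum in an older, since-redefined version of the class.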
4561
4562 #if INCLUDE_JVMTI
4563 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
4564 return _cached_class_file;
4565 }
4566
4567 jint InstanceKlass::get_cached_class_file_len() {
4568 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
4569 }
4570
4571 unsigned char * InstanceKlass::get_cached_class_file_bytes() {
4572 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
4573 }
4574 #endif
4575
// Advance one step in the iteration over the class hierarchy rooted at _root.
// Skips the subclasses of the current class if that was requested via _visit_subclasses.
4578 void ClassHierarchyIterator::next() {
4579 assert(_current != nullptr, "required");
4580 if (_visit_subclasses && _current->subklass() != nullptr) {
4581 _current = _current->subklass();
4582 return; // visit next subclass
4583 }
  _visit_subclasses = true; // reset; subclass skipping applies to a single step only
4585 while (_current->next_sibling() == nullptr && _current != _root) {
4586 _current = _current->java_super(); // backtrack; no more sibling subclasses left
4587 }
4588 if (_current == _root) {
4589 // Iteration is over (back at root after backtracking). Invalidate the iterator.
4590 _current = nullptr;
4591 return;
4592 }
4593 _current = _current->next_sibling();
4594 return; // visit next sibling subclass
4595 }
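
// Usage sketch (illustrative; assumes the done()/klass()/skip_subclasses() accessors
// declared for ClassHierarchyIterator in instanceKlass.hpp): a pre-order walk over
// the hierarchy rooted at some InstanceKlass* root looks roughly like
//
//   for (ClassHierarchyIterator iter(root); !iter.done(); iter.next()) {
//     Klass* k = iter.klass();
//     if (!is_interesting(k)) {     // is_interesting() is a hypothetical predicate
//       iter.skip_subclasses();     // prune k's subtree from the walk
//     }
//   }
//
// next() provides the backtracking part of that walk: descend into the first subclass
// when allowed, otherwise move to the next sibling, climbing toward the root when a
// subtree is exhausted.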