1 /*
2 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotClassInitializer.hpp"
26 #include "cds/aotMetaspace.hpp"
27 #include "cds/archiveUtils.hpp"
28 #include "cds/cdsConfig.hpp"
29 #include "cds/cdsEnumKlass.hpp"
30 #include "cds/classListWriter.hpp"
31 #include "cds/heapShared.hpp"
32 #include "classfile/classFileParser.hpp"
33 #include "classfile/classFileStream.hpp"
34 #include "classfile/classLoader.hpp"
35 #include "classfile/classLoaderData.inline.hpp"
36 #include "classfile/javaClasses.hpp"
37 #include "classfile/moduleEntry.hpp"
38 #include "classfile/systemDictionary.hpp"
39 #include "classfile/systemDictionaryShared.hpp"
40 #include "classfile/verifier.hpp"
41 #include "classfile/vmClasses.hpp"
42 #include "classfile/vmSymbols.hpp"
43 #include "code/codeCache.hpp"
44 #include "code/dependencyContext.hpp"
45 #include "compiler/compilationPolicy.hpp"
46 #include "compiler/compileBroker.hpp"
47 #include "gc/shared/collectedHeap.inline.hpp"
48 #include "interpreter/bytecodeStream.hpp"
49 #include "interpreter/oopMapCache.hpp"
50 #include "interpreter/rewriter.hpp"
51 #include "jvm.h"
52 #include "jvmtifiles/jvmti.h"
54 #include "logging/log.hpp"
55 #include "logging/logMessage.hpp"
56 #include "logging/logStream.hpp"
57 #include "memory/allocation.inline.hpp"
58 #include "memory/iterator.inline.hpp"
59 #include "memory/metadataFactory.hpp"
60 #include "memory/metaspaceClosure.hpp"
61 #include "memory/oopFactory.hpp"
62 #include "memory/resourceArea.hpp"
63 #include "memory/universe.hpp"
64 #include "oops/constantPool.hpp"
65 #include "oops/fieldStreams.inline.hpp"
66 #include "oops/instanceClassLoaderKlass.hpp"
67 #include "oops/instanceKlass.inline.hpp"
68 #include "oops/instanceMirrorKlass.hpp"
69 #include "oops/instanceOop.hpp"
70 #include "oops/instanceStackChunkKlass.hpp"
71 #include "oops/klass.inline.hpp"
72 #include "oops/method.hpp"
73 #include "oops/oop.inline.hpp"
74 #include "oops/recordComponent.hpp"
75 #include "oops/symbol.hpp"
76 #include "prims/jvmtiExport.hpp"
77 #include "prims/jvmtiRedefineClasses.hpp"
78 #include "prims/jvmtiThreadState.hpp"
79 #include "prims/methodComparator.hpp"
80 #include "runtime/arguments.hpp"
81 #include "runtime/atomicAccess.hpp"
82 #include "runtime/deoptimization.hpp"
83 #include "runtime/fieldDescriptor.inline.hpp"
84 #include "runtime/handles.inline.hpp"
85 #include "runtime/javaCalls.hpp"
86 #include "runtime/javaThread.inline.hpp"
87 #include "runtime/mutexLocker.hpp"
88 #include "runtime/orderAccess.hpp"
89 #include "runtime/os.inline.hpp"
90 #include "runtime/reflection.hpp"
91 #include "runtime/synchronizer.hpp"
92 #include "runtime/threads.hpp"
93 #include "services/classLoadingService.hpp"
94 #include "services/finalizerService.hpp"
95 #include "services/threadService.hpp"
96 #include "utilities/dtrace.hpp"
97 #include "utilities/events.hpp"
98 #include "utilities/macros.hpp"
99 #include "utilities/nativeStackPrinter.hpp"
100 #include "utilities/stringUtils.hpp"
101 #ifdef COMPILER1
102 #include "c1/c1_Compiler.hpp"
103 #endif
104 #if INCLUDE_JFR
105 #include "jfr/jfrEvents.hpp"
106 #endif
107
108 #ifdef DTRACE_ENABLED
109
110
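// Map the lowercase probe names used by DTRACE_CLASSINIT_PROBE's token pasting
// (##type) onto the uppercase probe macros generated for the hotspot provider.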
111 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
112 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
113 #define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
114 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
115 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
116 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
117 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
118 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
119 #define DTRACE_CLASSINIT_PROBE(type, thread_type) \
120 { \
121 char* data = nullptr; \
122 int len = 0; \
123 Symbol* clss_name = name(); \
124 if (clss_name != nullptr) { \
125 data = (char*)clss_name->bytes(); \
126 len = clss_name->utf8_length(); \
127 } \
128 HOTSPOT_CLASS_INITIALIZATION_##type( \
129 data, len, (void*)class_loader(), thread_type); \
130 }
131
132 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \
133 { \
134 char* data = nullptr; \
135 int len = 0; \
136 Symbol* clss_name = name(); \
137 if (clss_name != nullptr) { \
138 data = (char*)clss_name->bytes(); \
139 len = clss_name->utf8_length(); \
140 } \
141 HOTSPOT_CLASS_INITIALIZATION_##type( \
142 data, len, (void*)class_loader(), thread_type, wait); \
143 }
144
145 #else // ndef DTRACE_ENABLED
146
147 #define DTRACE_CLASSINIT_PROBE(type, thread_type)
148 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
149
150 #endif // ndef DTRACE_ENABLED
151
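// Set to false when finalization is disabled (e.g. via --finalization=disabled).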
152 bool InstanceKlass::_finalization_enabled = true;
153
154 static inline bool is_class_loader(const Symbol* class_name,
155 const ClassFileParser& parser) {
156 assert(class_name != nullptr, "invariant");
157
158 if (class_name == vmSymbols::java_lang_ClassLoader()) {
159 return true;
160 }
161
162 if (vmClasses::ClassLoader_klass_is_loaded()) {
163 const Klass* const super_klass = parser.super_klass();
164 if (super_klass != nullptr) {
165 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
166 return true;
167 }
168 }
169 }
170 return false;
171 }
172
173 static inline bool is_stack_chunk_class(const Symbol* class_name,
174 const ClassLoaderData* loader_data) {
175 return (class_name == vmSymbols::jdk_internal_vm_StackChunk() &&
176 loader_data->is_the_null_class_loader_data());
177 }
178
179 // private: called to verify that k is a static member of this nest.
180 // We know that k is an instance class in the same package and hence the
181 // same classloader.
182 bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
183 assert(!is_hidden(), "unexpected hidden class");
184 if (_nest_members == nullptr || _nest_members == Universe::the_empty_short_array()) {
185 if (log_is_enabled(Trace, class, nestmates)) {
186 ResourceMark rm(current);
187 log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
188 k->external_name(), this->external_name());
189 }
190 return false;
191 }
192
193 if (log_is_enabled(Trace, class, nestmates)) {
194 ResourceMark rm(current);
195 log_trace(class, nestmates)("Checking nest membership of %s in %s",
196 k->external_name(), this->external_name());
197 }
198
199 // Check for the named class in _nest_members.
200 // We don't resolve, or load, any classes.
201 for (int i = 0; i < _nest_members->length(); i++) {
202 int cp_index = _nest_members->at(i);
203 Symbol* name = _constants->klass_name_at(cp_index);
204 if (name == k->name()) {
205 log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
206 return true;
207 }
208 }
209 log_trace(class, nestmates)("- class is NOT a nest member!");
210 return false;
211 }
212
213 // Called to verify that k is a permitted subclass of this class.
214 // The incoming stringStream is used to format the messages for error logging and for the caller
215 // to use for exception throwing.
216 bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k, stringStream& ss) const {
217 Thread* current = Thread::current();
218 assert(k != nullptr, "sanity check");
219 assert(_permitted_subclasses != nullptr && _permitted_subclasses != Universe::the_empty_short_array(),
220 "unexpected empty _permitted_subclasses array");
221
222 if (log_is_enabled(Trace, class, sealed)) {
223 ResourceMark rm(current);
224 log_trace(class, sealed)("Checking for permitted subclass %s in %s",
225 k->external_name(), this->external_name());
226 }
227
228 // Check that the class and its super are in the same module.
229 if (k->module() != this->module()) {
230 ss.print("Failed same module check: subclass %s is in module '%s' with loader %s, "
231 "and sealed class %s is in module '%s' with loader %s",
232 k->external_name(),
233 k->module()->name_as_C_string(),
234 k->module()->loader_data()->loader_name_and_id(),
235 this->external_name(),
236 this->module()->name_as_C_string(),
237 this->module()->loader_data()->loader_name_and_id());
238 log_trace(class, sealed)(" - %s", ss.as_string());
239 return false;
240 }
241
242 if (!k->is_public() && !is_same_class_package(k)) {
243 ss.print("Failed same package check: non-public subclass %s is in package '%s' with classloader %s, "
244 "and sealed class %s is in package '%s' with classloader %s",
245 k->external_name(),
246 k->package() != nullptr ? k->package()->name()->as_C_string() : "unnamed",
247 k->module()->loader_data()->loader_name_and_id(),
248 this->external_name(),
249 this->package() != nullptr ? this->package()->name()->as_C_string() : "unnamed",
250 this->module()->loader_data()->loader_name_and_id());
251 log_trace(class, sealed)(" - %s", ss.as_string());
252 return false;
253 }
254
255 for (int i = 0; i < _permitted_subclasses->length(); i++) {
256 int cp_index = _permitted_subclasses->at(i);
257 Symbol* name = _constants->klass_name_at(cp_index);
258 if (name == k->name()) {
259 log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
260 return true;
261 }
262 }
263
264 ss.print("Failed listed permitted subclass check: class %s is not a permitted subclass of %s",
265 k->external_name(), this->external_name());
266 log_trace(class, sealed)(" - %s", ss.as_string());
267 return false;
268 }
269
270 // Return nest-host class, resolving, validating and saving it if needed.
271 // In cases where this is called from a thread that cannot do classloading
272 // (such as a native JIT thread) then we simply return null, which in turn
273 // causes the access check to return false. Such code will retry the access
274 // from a more suitable environment later. Otherwise the _nest_host is always
275 // set once this method returns.
276 // Any errors from nest-host resolution must be preserved so they can be queried
277 // from higher-level access checking code, and reported as part of access checking
278 // exceptions.
279 // VirtualMachineErrors are propagated with a null return.
280 // Under any conditions where the _nest_host can be set to non-null the resulting
281 // value of it and, if applicable, the nest host resolution/validation error,
282 // are idempotent.
283 InstanceKlass* InstanceKlass::nest_host(TRAPS) {
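  // Fast path: return the nest-host if it has already been resolved and cached.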
284 InstanceKlass* nest_host_k = _nest_host;
285 if (nest_host_k != nullptr) {
286 return nest_host_k;
287 }
288
289 ResourceMark rm(THREAD);
290
291 // need to resolve and save our nest-host class.
292 if (_nest_host_index != 0) { // we have a real nest_host
293 // Before trying to resolve check if we're in a suitable context
294 bool can_resolve = THREAD->can_call_java();
295 if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
296 log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
297 this->external_name());
298 return nullptr; // sentinel to say "try again from a different context"
299 }
300
301 log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
302 this->external_name(),
303 _constants->klass_name_at(_nest_host_index)->as_C_string());
304
305 Klass* k = _constants->klass_at(_nest_host_index, THREAD);
306 if (HAS_PENDING_EXCEPTION) {
307 if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
308 return nullptr; // propagate VMEs
309 }
310 stringStream ss;
311 char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
312 ss.print("Nest host resolution of %s with host %s failed: ",
313 this->external_name(), target_host_class);
314 java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
315 const char* msg = ss.as_string(true /* on C-heap */);
316 constantPoolHandle cph(THREAD, constants());
317 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
318 CLEAR_PENDING_EXCEPTION;
319
320 log_trace(class, nestmates)("%s", msg);
321 } else {
322 // A valid nest-host is an instance class in the current package that lists this
323 // class as a nest member. If any of these conditions are not met the class is
324 // its own nest-host.
325 const char* error = nullptr;
326
327 // JVMS 5.4.4 indicates package check comes first
328 if (is_same_class_package(k)) {
329 // Now check actual membership. We can't be a member if our "host" is
330 // not an instance class.
331 if (k->is_instance_klass()) {
332 nest_host_k = InstanceKlass::cast(k);
333 bool is_member = nest_host_k->has_nest_member(THREAD, this);
334 if (is_member) {
335 _nest_host = nest_host_k; // save resolved nest-host value
336
337 log_trace(class, nestmates)("Resolved nest-host of %s to %s",
338 this->external_name(), k->external_name());
339 return nest_host_k;
340 } else {
341 error = "current type is not listed as a nest member";
342 }
343 } else {
344 error = "host is not an instance class";
345 }
346 } else {
347 error = "types are in different packages";
348 }
349
350 // something went wrong, so record what and log it
351 {
352 stringStream ss;
353 ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
354 this->external_name(),
355 this->class_loader_data()->loader_name_and_id(),
356 k->external_name(),
357 k->class_loader_data()->loader_name_and_id(),
358 error);
359 const char* msg = ss.as_string(true /* on C-heap */);
360 constantPoolHandle cph(THREAD, constants());
361 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
362 log_trace(class, nestmates)("%s", msg);
363 }
364 }
365 } else {
366 log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
367 this->external_name());
368 }
369
370 // Either not in an explicit nest, or else an error occurred, so
371 // the nest-host is set to `this`. Any thread that sees this assignment
372 // will also see any setting of nest_host_error(), if applicable.
373 return (_nest_host = this);
374 }
375
376 // Dynamic nest member support: set this class's nest host to the given class.
377 // This occurs as part of the class definition, as soon as the instanceKlass
378 // has been created and doesn't require further resolution. The code:
379 // lookup().defineHiddenClass(bytes_for_X, NESTMATE);
380 // results in:
381 // class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
382 // If it has an explicit _nest_host_index or _nest_members, these will be ignored.
383 // We also know the "host" is a valid nest-host in the same package so we can
384 // assert some of those facts.
385 void InstanceKlass::set_nest_host(InstanceKlass* host) {
386 assert(is_hidden(), "must be a hidden class");
387 assert(host != nullptr, "null nest host specified");
388 assert(_nest_host == nullptr, "current class has resolved nest-host");
389 assert(nest_host_error() == nullptr, "unexpected nest host resolution error exists: %s",
390 nest_host_error());
391 assert((host->_nest_host == nullptr && host->_nest_host_index == 0) ||
392 (host->_nest_host == host), "proposed host is not a valid nest-host");
393 // Can't assert this as package is not set yet:
394 // assert(is_same_class_package(host), "proposed host is in wrong package");
395
396 if (log_is_enabled(Trace, class, nestmates)) {
397 ResourceMark rm;
398 const char* msg = "";
399 // a hidden class does not expect a statically defined nest-host
400 if (_nest_host_index > 0) {
401 msg = "(the NestHost attribute in the current class is ignored)";
402 } else if (_nest_members != nullptr && _nest_members != Universe::the_empty_short_array()) {
403 msg = "(the NestMembers attribute in the current class is ignored)";
404 }
405 log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
406 this->external_name(),
407 host->external_name(),
408 msg);
409 }
410 // set dynamic nest host
411 _nest_host = host;
412 // Record dependency to keep nest host from being unloaded before this class.
413 ClassLoaderData* this_key = class_loader_data();
414 assert(this_key != nullptr, "sanity");
415 this_key->record_dependency(host);
416 }
417
418 // check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
419 // or we are k's nest_host - all of which is covered by comparing the two
420 // resolved_nest_hosts.
421 // Any exceptions (i.e. VMEs) are propagated.
422 bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {
423
424 assert(this != k, "this should be handled by higher-level code");
425
426 // Per JVMS 5.4.4 we first resolve and validate the current class, then
427 // the target class k.
428
429 InstanceKlass* cur_host = nest_host(CHECK_false);
430 if (cur_host == nullptr) {
431 return false;
432 }
433
434 Klass* k_nest_host = k->nest_host(CHECK_false);
435 if (k_nest_host == nullptr) {
436 return false;
437 }
438
439 bool access = (cur_host == k_nest_host);
440
441 ResourceMark rm(THREAD);
442 log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
443 this->external_name(),
444 access ? "" : "NOT ",
445 k->external_name());
446 return access;
447 }
448
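// Returns the saved nest-host resolution error message for this class, if any
// (recorded during nest_host() resolution), or null if there is none.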
449 const char* InstanceKlass::nest_host_error() {
450 if (_nest_host_index == 0) {
451 return nullptr;
452 } else {
453 constantPoolHandle cph(Thread::current(), constants());
454 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
455 }
456 }
457
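// Allocate the InstanceKlass variant that matches the class being defined:
// InstanceRefKlass for java.lang.ref.Reference subclasses, InstanceMirrorKlass for
// java.lang.Class, InstanceStackChunkKlass for jdk.internal.vm.StackChunk,
// InstanceClassLoaderKlass for class loaders, and plain InstanceKlass otherwise.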
458 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
459 const int size = InstanceKlass::size(parser.vtable_size(),
460 parser.itable_size(),
461 nonstatic_oop_map_size(parser.total_oop_map_count()),
462 parser.is_interface());
463
464 const Symbol* const class_name = parser.class_name();
465 assert(class_name != nullptr, "invariant");
466 ClassLoaderData* loader_data = parser.loader_data();
467 assert(loader_data != nullptr, "invariant");
468
469 InstanceKlass* ik;
470
471 // Allocation
472 if (parser.is_instance_ref_klass()) {
473 // java.lang.ref.Reference
474 ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
475 } else if (class_name == vmSymbols::java_lang_Class()) {
476 // mirror - java.lang.Class
477 ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
478 } else if (is_stack_chunk_class(class_name, loader_data)) {
479 // stack chunk
480 ik = new (loader_data, size, THREAD) InstanceStackChunkKlass(parser);
481 } else if (is_class_loader(class_name, parser)) {
482 // class loader - java.lang.ClassLoader
483 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
484 } else {
485 // normal
486 ik = new (loader_data, size, THREAD) InstanceKlass(parser);
487 }
488
489 if (ik != nullptr && UseCompressedClassPointers) {
490 assert(CompressedKlassPointers::is_encodable(ik),
           "Klass " PTR_FORMAT " needs a narrow Klass ID, but is not encodable", p2i(ik));
492 }
493
494 // Check for pending exception before adding to the loader data and incrementing
495 // class count. Can get OOM here.
496 if (HAS_PENDING_EXCEPTION) {
497 return nullptr;
498 }
499
500 return ik;
501 }
502
503
504 // copy method ordering from resource area to Metaspace
505 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
506 if (m != nullptr) {
507 // allocate a new array and copy contents (memcpy?)
508 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
509 for (int i = 0; i < m->length(); i++) {
510 _method_ordering->at_put(i, m->at(i));
511 }
512 } else {
513 _method_ordering = Universe::the_empty_int_array();
514 }
515 }
516
517 // create a new array of vtable_indices for default methods
518 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
519 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
520 assert(default_vtable_indices() == nullptr, "only create once");
521 set_default_vtable_indices(vtable_indices);
522 return vtable_indices;
523 }
524
525
526 InstanceKlass::InstanceKlass() {
527 assert(CDSConfig::is_dumping_static_archive() || CDSConfig::is_using_archive(), "only for CDS");
528 }
529
530 InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, ReferenceType reference_type) :
531 Klass(kind),
532 _nest_members(nullptr),
533 _nest_host(nullptr),
534 _permitted_subclasses(nullptr),
535 _record_components(nullptr),
536 _static_field_size(parser.static_field_size()),
537 _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
538 _itable_len(parser.itable_size()),
539 _nest_host_index(0),
540 _init_state(allocated),
541 _reference_type(reference_type),
542 _init_thread(nullptr)
543 {
544 set_vtable_length(parser.vtable_size());
545 set_access_flags(parser.access_flags());
546 if (parser.is_hidden()) set_is_hidden();
547 set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
548 false));
549
550 assert(nullptr == _methods, "underlying memory not zeroed?");
551 assert(is_instance_klass(), "is layout incorrect?");
552 assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
553 }
554
555 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
556 Array<Method*>* methods) {
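  // Methods and arrays that live in the AOT cache are never freed; only
  // loader-allocated metadata is released here.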
557 if (methods != nullptr && methods != Universe::the_empty_method_array() &&
558 !methods->in_aot_cache()) {
559 for (int i = 0; i < methods->length(); i++) {
560 Method* method = methods->at(i);
561 if (method == nullptr) continue; // maybe null if error processing
562 // Only want to delete methods that are not executing for RedefineClasses.
563 // The previous version will point to them so they're not totally dangling
564 assert (!method->on_stack(), "shouldn't be called with methods on stack");
565 MetadataFactory::free_metadata(loader_data, method);
566 }
567 MetadataFactory::free_array<Method*>(loader_data, methods);
568 }
569 }
570
571 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
572 const InstanceKlass* super_klass,
573 Array<InstanceKlass*>* local_interfaces,
574 Array<InstanceKlass*>* transitive_interfaces) {
575 // Only deallocate transitive interfaces if not empty, same as super class
576 // or same as local interfaces. See code in parseClassFile.
577 Array<InstanceKlass*>* ti = transitive_interfaces;
578 if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
579 // check that the interfaces don't come from super class
580 Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
581 super_klass->transitive_interfaces();
582 if (ti != sti && ti != nullptr && !ti->in_aot_cache()) {
583 MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
584 }
585 }
586
587 // local interfaces can be empty
588 if (local_interfaces != Universe::the_empty_instance_klass_array() &&
589 local_interfaces != nullptr && !local_interfaces->in_aot_cache()) {
590 MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
591 }
592 }
593
594 void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
595 Array<RecordComponent*>* record_components) {
596 if (record_components != nullptr && !record_components->in_aot_cache()) {
597 for (int i = 0; i < record_components->length(); i++) {
598 RecordComponent* record_component = record_components->at(i);
599 MetadataFactory::free_metadata(loader_data, record_component);
600 }
601 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
602 }
603 }
604
605 // This function deallocates the metadata and C heap pointers that the
606 // InstanceKlass points to.
607 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
608 // Orphan the mirror first, CMS thinks it's still live.
609 if (java_mirror() != nullptr) {
610 java_lang_Class::set_klass(java_mirror(), nullptr);
611 }
612
613 // Also remove mirror from handles
614 loader_data->remove_handle(_java_mirror);
615
616 // Need to take this class off the class loader data list.
617 loader_data->remove_class(this);
618
619 // The array_klass for this class is created later, after error handling.
620 // For class redefinition, we keep the original class so this scratch class
621 // doesn't have an array class. Either way, assert that there is nothing
622 // to deallocate.
623 assert(array_klasses() == nullptr, "array classes shouldn't be created for this class yet");
624
625 // Release C heap allocated data that this points to, which includes
626 // reference counting symbol names.
627 // Can't release the constant pool or MethodData C heap data here because the constant
628 // pool can be deallocated separately from the InstanceKlass for default methods and
629 // redefine classes. MethodData can also be released separately.
630 release_C_heap_structures(/* release_sub_metadata */ false);
631
632 deallocate_methods(loader_data, methods());
633 set_methods(nullptr);
634
635 deallocate_record_components(loader_data, record_components());
636 set_record_components(nullptr);
637
638 if (method_ordering() != nullptr &&
639 method_ordering() != Universe::the_empty_int_array() &&
640 !method_ordering()->in_aot_cache()) {
641 MetadataFactory::free_array<int>(loader_data, method_ordering());
642 }
643 set_method_ordering(nullptr);
644
645 // default methods can be empty
646 if (default_methods() != nullptr &&
647 default_methods() != Universe::the_empty_method_array() &&
648 !default_methods()->in_aot_cache()) {
649 MetadataFactory::free_array<Method*>(loader_data, default_methods());
650 }
  // Do NOT deallocate the default methods themselves; the Method*s are owned by
  // superinterfaces. Only the array holding them was freed above.
652 set_default_methods(nullptr);
653
654 // default methods vtable indices can be empty
655 if (default_vtable_indices() != nullptr &&
656 !default_vtable_indices()->in_aot_cache()) {
657 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
658 }
659 set_default_vtable_indices(nullptr);
660
661
662 // This array is in Klass, but remove it with the InstanceKlass since
663 // this place would be the only caller and it can share memory with transitive
664 // interfaces.
665 if (secondary_supers() != nullptr &&
666 secondary_supers() != Universe::the_empty_klass_array() &&
667 // see comments in compute_secondary_supers about the following cast
668 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
669 !secondary_supers()->in_aot_cache()) {
670 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
671 }
672 set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);
673
674 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
675 set_transitive_interfaces(nullptr);
676 set_local_interfaces(nullptr);
677
678 if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->in_aot_cache()) {
679 MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
680 }
681 set_fieldinfo_stream(nullptr);
682
683 if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->in_aot_cache()) {
684 MetadataFactory::free_array<u1>(loader_data, fieldinfo_search_table());
685 }
686 set_fieldinfo_search_table(nullptr);
687
688 if (fields_status() != nullptr && !fields_status()->in_aot_cache()) {
689 MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
690 }
691 set_fields_status(nullptr);
692
693 // If a method from a redefined class is using this constant pool, don't
694 // delete it, yet. The new class's previous version will point to this.
695 if (constants() != nullptr) {
696 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
697 if (!constants()->in_aot_cache()) {
698 MetadataFactory::free_metadata(loader_data, constants());
699 }
700 // Delete any cached resolution errors for the constant pool
701 SystemDictionary::delete_resolution_error(constants());
702
703 set_constants(nullptr);
704 }
705
706 if (inner_classes() != nullptr &&
707 inner_classes() != Universe::the_empty_short_array() &&
708 !inner_classes()->in_aot_cache()) {
709 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
710 }
711 set_inner_classes(nullptr);
712
713 if (nest_members() != nullptr &&
714 nest_members() != Universe::the_empty_short_array() &&
715 !nest_members()->in_aot_cache()) {
716 MetadataFactory::free_array<jushort>(loader_data, nest_members());
717 }
718 set_nest_members(nullptr);
719
720 if (permitted_subclasses() != nullptr &&
721 permitted_subclasses() != Universe::the_empty_short_array() &&
722 !permitted_subclasses()->in_aot_cache()) {
723 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
724 }
725 set_permitted_subclasses(nullptr);
726
727 // We should deallocate the Annotations instance if it's not in shared spaces.
728 if (annotations() != nullptr && !annotations()->in_aot_cache()) {
729 MetadataFactory::free_metadata(loader_data, annotations());
730 }
731 set_annotations(nullptr);
732
733 SystemDictionaryShared::handle_class_unloading(this);
734
735 #if INCLUDE_CDS_JAVA_HEAP
736 if (CDSConfig::is_dumping_heap()) {
737 HeapShared::remove_scratch_objects(this);
738 }
739 #endif
740 }
741
742 bool InstanceKlass::is_record() const {
743 return _record_components != nullptr &&
744 is_final() &&
745 super() == vmClasses::Record_klass();
746 }
747
748 bool InstanceKlass::is_sealed() const {
749 return _permitted_subclasses != nullptr &&
750 _permitted_subclasses != Universe::the_empty_short_array();
751 }
752
753 // JLS 8.9: An enum class is either implicitly final and derives
754 // from java.lang.Enum, or else is implicitly sealed to its
755 // anonymous subclasses. This query detects both kinds.
756 // It does not validate the finality or
757 // sealing conditions: it merely checks for a super of Enum.
758 // This is sufficient for recognizing well-formed enums.
759 bool InstanceKlass::is_enum_subclass() const {
760 InstanceKlass* s = super();
761 return (s == vmClasses::Enum_klass() ||
762 (s != nullptr && s->super() == vmClasses::Enum_klass()));
763 }
764
765 bool InstanceKlass::should_be_initialized() const {
766 return !is_initialized();
767 }
768
769 klassItable InstanceKlass::itable() const {
770 return klassItable(const_cast<InstanceKlass*>(this));
771 }
772
773 // JVMTI spec thinks there are signers and protection domain in the
774 // instanceKlass. These accessors pretend these fields are there.
775 // The hprof specification also thinks these fields are in InstanceKlass.
776 oop InstanceKlass::protection_domain() const {
777 // return the protection_domain from the mirror
778 return java_lang_Class::protection_domain(java_mirror());
779 }
780
781 objArrayOop InstanceKlass::signers() const {
782 // return the signers from the mirror
783 return java_lang_Class::signers(java_mirror());
784 }
785
786 oop InstanceKlass::init_lock() const {
787 // return the init lock from the mirror
788 oop lock = java_lang_Class::init_lock(java_mirror());
789 // Prevent reordering with any access of initialization state
790 OrderAccess::loadload();
791 assert(lock != nullptr || !is_not_initialized(), // initialized or in_error state
792 "only fully initialized state can have a null lock");
793 return lock;
794 }
795
// Set the initialization lock to null so the object can be GC'ed. Any threads
// racing to get this lock will see a null lock and will not lock.
798 // That's okay because they all check for initialized state after getting
799 // the lock and return. For preempted vthreads we keep the oop protected
800 // in the ObjectMonitor (see ObjectMonitor::set_object_strong()).
801 void InstanceKlass::fence_and_clear_init_lock() {
802 // make sure previous stores are all done, notably the init_state.
803 OrderAccess::storestore();
804 java_lang_Class::clear_init_lock(java_mirror());
805 assert(!is_not_initialized(), "class must be initialized now");
806 }
807
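// RAII helper that marks the current thread as executing a preemptable class
// initialization call (and, in debug builds, records which class), restoring the
// previous values when the call completes.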
808 class PreemptableInitCall {
809 JavaThread* _thread;
810 bool _previous;
811 DEBUG_ONLY(InstanceKlass* _previous_klass;)
812 public:
813 PreemptableInitCall(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
814 _previous = thread->at_preemptable_init();
815 _thread->set_at_preemptable_init(true);
816 DEBUG_ONLY(_previous_klass = _thread->preempt_init_klass();)
817 DEBUG_ONLY(_thread->set_preempt_init_klass(ik));
818 }
819 ~PreemptableInitCall() {
820 _thread->set_at_preemptable_init(_previous);
821 DEBUG_ONLY(_thread->set_preempt_init_klass(_previous_klass));
822 }
823 };
824
825 void InstanceKlass::initialize_preemptable(TRAPS) {
826 if (this->should_be_initialized()) {
827 PreemptableInitCall pic(THREAD, this);
828 initialize_impl(THREAD);
829 } else {
830 assert(is_initialized(), "sanity check");
831 }
832 }
833
// See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
// process. The step comments refer to the procedure described in that section.
836 // Note: implementation moved to static method to expose the this pointer.
837 void InstanceKlass::initialize(TRAPS) {
838 if (this->should_be_initialized()) {
839 initialize_impl(CHECK);
840 // Note: at this point the class may be initialized
841 // OR it may be in the state of being initialized
842 // in case of recursive initialization!
843 } else {
844 assert(is_initialized(), "sanity check");
845 }
846 }
847
848 #ifdef ASSERT
849 void InstanceKlass::assert_no_clinit_will_run_for_aot_initialized_class() const {
850 assert(has_aot_initialized_mirror(), "must be");
851
852 InstanceKlass* s = super();
853 if (s != nullptr) {
854 DEBUG_ONLY(ResourceMark rm);
855 assert(s->is_initialized(), "super class %s of aot-inited class %s must have been initialized",
856 s->external_name(), external_name());
857 s->assert_no_clinit_will_run_for_aot_initialized_class();
858 }
859
860 Array<InstanceKlass*>* interfaces = local_interfaces();
861 int len = interfaces->length();
862 for (int i = 0; i < len; i++) {
863 InstanceKlass* intf = interfaces->at(i);
864 if (!intf->is_initialized()) {
865 ResourceMark rm;
866 // Note: an interface needs to be marked as is_initialized() only if
867 // - it has a <clinit>
868 // - it has declared a default method.
869 assert(!intf->interface_needs_clinit_execution_as_super(/*also_check_supers*/false),
870 "uninitialized super interface %s of aot-inited class %s must not have <clinit>",
871 intf->external_name(), external_name());
872 }
873 }
874 }
875 #endif
876
877 #if INCLUDE_CDS
878 void InstanceKlass::initialize_with_aot_initialized_mirror(TRAPS) {
879 assert(has_aot_initialized_mirror(), "must be");
880 assert(CDSConfig::is_loading_heap(), "must be");
881 assert(CDSConfig::is_using_aot_linked_classes(), "must be");
882 assert_no_clinit_will_run_for_aot_initialized_class();
883
884 if (is_initialized()) {
885 return;
886 }
887
888 if (is_runtime_setup_required()) {
889 // Need to take the slow path, which will call the runtimeSetup() function instead
890 // of <clinit>
891 initialize(CHECK);
892 return;
893 }
894 if (log_is_enabled(Info, aot, init)) {
895 ResourceMark rm;
896 log_info(aot, init)("%s (aot-inited)", external_name());
897 }
898
899 link_class(CHECK);
900
901 #ifdef ASSERT
902 {
903 Handle h_init_lock(THREAD, init_lock());
904 ObjectLocker ol(h_init_lock, THREAD);
905 assert(!is_initialized(), "sanity");
906 assert(!is_being_initialized(), "sanity");
907 assert(!is_in_error_state(), "sanity");
908 }
909 #endif
910
911 set_init_thread(THREAD);
912 set_initialization_state_and_notify(fully_initialized, CHECK);
913 }
914 #endif
915
916 bool InstanceKlass::verify_code(TRAPS) {
917 // 1) Verify the bytecodes
918 return Verifier::verify(this, should_verify_class(), THREAD);
919 }
920
921 void InstanceKlass::link_class(TRAPS) {
922 assert(is_loaded(), "must be loaded");
923 if (!is_linked()) {
924 link_class_impl(CHECK);
925 }
926 }
927
928 // Called to verify that a class can link during initialization, without
929 // throwing a VerifyError.
930 bool InstanceKlass::link_class_or_fail(TRAPS) {
931 assert(is_loaded(), "must be loaded");
932 if (!is_linked()) {
933 link_class_impl(CHECK_false);
934 }
935 return is_linked();
936 }
937
938 bool InstanceKlass::link_class_impl(TRAPS) {
939 if (CDSConfig::is_dumping_static_archive() && SystemDictionaryShared::has_class_failed_verification(this)) {
940 // This is for CDS static dump only -- we use the in_error_state to indicate that
941 // the class has failed verification. Throwing the NoClassDefFoundError here is just
942 // a convenient way to stop repeat attempts to verify the same (bad) class.
943 //
944 // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
    // if we are executing Java code. This is not a problem for the CDS dumping phase since
946 // it doesn't execute any Java code.
947 ResourceMark rm(THREAD);
948 // Names are all known to be < 64k so we know this formatted message is not excessively large.
949 Exceptions::fthrow(THREAD_AND_LOCATION,
950 vmSymbols::java_lang_NoClassDefFoundError(),
951 "Class %s, or one of its supertypes, failed class initialization",
952 external_name());
953 return false;
954 }
955 // return if already verified
956 if (is_linked()) {
957 return true;
958 }
959
960 // Timing
961 // timer handles recursion
962 JavaThread* jt = THREAD;
963
964 // link super class before linking this class
965 InstanceKlass* super_klass = super();
966 if (super_klass != nullptr) {
967 if (super_klass->is_interface()) { // check if super class is an interface
968 ResourceMark rm(THREAD);
969 // Names are all known to be < 64k so we know this formatted message is not excessively large.
970 Exceptions::fthrow(
971 THREAD_AND_LOCATION,
972 vmSymbols::java_lang_IncompatibleClassChangeError(),
973 "class %s has interface %s as super class",
974 external_name(),
975 super_klass->external_name()
976 );
977 return false;
978 }
979
980 super_klass->link_class_impl(CHECK_false);
981 }
982
983 // link all interfaces implemented by this class before linking this class
984 Array<InstanceKlass*>* interfaces = local_interfaces();
985 int num_interfaces = interfaces->length();
986 for (int index = 0; index < num_interfaces; index++) {
987 InstanceKlass* interk = interfaces->at(index);
988 interk->link_class_impl(CHECK_false);
989 }
990
991 // in case the class is linked in the process of linking its superclasses
992 if (is_linked()) {
993 return true;
994 }
995
996 // trace only the link time for this klass that includes
997 // the verification time
998 PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
999 ClassLoader::perf_class_link_selftime(),
1000 ClassLoader::perf_classes_linked(),
1001 jt->get_thread_stat()->perf_recursion_counts_addr(),
1002 jt->get_thread_stat()->perf_timers_addr(),
1003 PerfClassTraceTime::CLASS_LINK);
1004
1005 // verification & rewriting
1006 {
1007 HandleMark hm(THREAD);
1008 Handle h_init_lock(THREAD, init_lock());
1009 ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE_false);
1010 // Don't allow preemption if we link/initialize classes below,
1011 // since that would release this monitor while we are in the
1012 // middle of linking this class.
1013 NoPreemptMark npm(THREAD);
1014
1015 // rewritten will have been set if loader constraint error found
1016 // on an earlier link attempt
1017 // don't verify or rewrite if already rewritten
1018 //
1019
1020 if (!is_linked()) {
1021 if (!is_rewritten()) {
1022 if (in_aot_cache()) {
1023 assert(!verified_at_dump_time(), "must be");
1024 }
1025 {
1026 bool verify_ok = verify_code(THREAD);
1027 if (!verify_ok) {
1028 return false;
1029 }
1030 }
1031
1032 // Just in case a side-effect of verify linked this class already
1033 // (which can sometimes happen since the verifier loads classes
1034 // using custom class loaders, which are free to initialize things)
1035 if (is_linked()) {
1036 return true;
1037 }
1038
1039 // also sets rewritten
1040 rewrite_class(CHECK_false);
1041 } else if (in_aot_cache()) {
1042 SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
1043 }
1044
1045 // relocate jsrs and link methods after they are all rewritten
1046 link_methods(CHECK_false);
1047
1048 // Initialize the vtable and interface table after
1049 // methods have been rewritten since rewrite may
1050 // fabricate new Method*s.
1051 // also does loader constraint checking
1052 //
1053 // initialize_vtable and initialize_itable need to be rerun
1054 // for a shared class if
1055 // 1) the class is loaded by custom class loader or
1056 // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
1057 // 3) the class was not verified during dump time
1058 bool need_init_table = true;
1059 if (in_aot_cache() && verified_at_dump_time() &&
1060 SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
1061 need_init_table = false;
1062 }
1063 if (need_init_table) {
1064 vtable().initialize_vtable_and_check_constraints(CHECK_false);
1065 itable().initialize_itable_and_check_constraints(CHECK_false);
1066 }
1067 #ifdef ASSERT
1068 vtable().verify(tty, true);
1069 // In case itable verification is ever added.
1070 // itable().verify(tty, true);
1071 #endif
1072 if (Universe::is_fully_initialized()) {
1073 DeoptimizationScope deopt_scope;
1074 {
1075 // Now mark all code that assumes the class is not linked.
1076 // Set state under the Compile_lock also.
1077 MutexLocker ml(THREAD, Compile_lock);
1078
1079 set_init_state(linked);
1080 CodeCache::mark_dependents_on(&deopt_scope, this);
1081 }
1082 // Perform the deopt handshake outside Compile_lock.
1083 deopt_scope.deoptimize_marked();
1084 } else {
1085 set_init_state(linked);
1086 }
1087 if (JvmtiExport::should_post_class_prepare()) {
1088 JvmtiExport::post_class_prepare(THREAD, this);
1089 }
1090 }
1091 }
1092 return true;
1093 }
1094
1095 // Rewrite the byte codes of all of the methods of a class.
1096 // The rewriter must be called exactly once. Rewriting must happen after
1097 // verification but before the first method of the class is executed.
1098 void InstanceKlass::rewrite_class(TRAPS) {
1099 assert(is_loaded(), "must be loaded");
1100 if (is_rewritten()) {
1101 assert(in_aot_cache(), "rewriting an unshared class?");
1102 return;
1103 }
1104 Rewriter::rewrite(this, CHECK);
1105 set_rewritten();
1106 }
1107
1108 // Now relocate and link method entry points after class is rewritten.
// This is outside of the is_rewritten flag guard. In case of an exception, it can be
// executed more than once.
1111 void InstanceKlass::link_methods(TRAPS) {
1112 PerfTraceTime timer(ClassLoader::perf_ik_link_methods_time());
1113
1114 int len = methods()->length();
1115 for (int i = len-1; i >= 0; i--) {
1116 methodHandle m(THREAD, methods()->at(i));
1117
    // Set up method entry points for compiler and interpreter.
1119 m->link_method(m, CHECK);
1120 }
1121 }
1122
1123 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1124 void InstanceKlass::initialize_super_interfaces(TRAPS) {
1125 assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1126 for (int i = 0; i < local_interfaces()->length(); ++i) {
1127 InstanceKlass* ik = local_interfaces()->at(i);
1128
    // Initialization is a depth-first search, i.e. we start with the top of the inheritance tree.
    // has_nonstatic_concrete_methods drives searching superinterfaces, since the flag being set
    // means some interface in the superinterface hierarchy declares non-static, concrete methods.
1132 if (ik->has_nonstatic_concrete_methods()) {
1133 ik->initialize_super_interfaces(CHECK);
1134 }
1135
1136 // Only initialize() interfaces that "declare" concrete methods.
1137 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1138 ik->initialize(CHECK);
1139 }
1140 }
1141 }
1142
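// Maps a class whose initialization failed to a saved copy of the original
// exception, so later attempts to use the class can report the underlying cause
// rather than a bare NoClassDefFoundError (see get_initialization_error()).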
1143 using InitializationErrorTable = HashTable<const InstanceKlass*, OopHandle, 107, AnyObj::C_HEAP, mtClass>;
1144 static InitializationErrorTable* _initialization_error_table;
1145
1146 void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
1147 // Create the same exception with a message indicating the thread name,
1148 // and the StackTraceElements.
1149 Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
1150 ResourceMark rm(current);
1151 if (init_error.is_null()) {
1152 log_trace(class, init)("Unable to create the desired initialization error for class %s", external_name());
1153
1154 // We failed to create the new exception, most likely due to either out-of-memory or
1155 // a stackoverflow error. If the original exception was either of those then we save
1156 // the shared, pre-allocated, stackless, instance of that exception.
1157 if (exception->klass() == vmClasses::StackOverflowError_klass()) {
1158 log_debug(class, init)("Using shared StackOverflowError as initialization error for class %s", external_name());
1159 init_error = Handle(current, Universe::class_init_stack_overflow_error());
1160 } else if (exception->klass() == vmClasses::OutOfMemoryError_klass()) {
1161 log_debug(class, init)("Using shared OutOfMemoryError as initialization error for class %s", external_name());
1162 init_error = Handle(current, Universe::class_init_out_of_memory_error());
1163 } else {
1164 return;
1165 }
1166 }
1167
1168 MutexLocker ml(current, ClassInitError_lock);
1169 OopHandle elem = OopHandle(Universe::vm_global(), init_error());
1170 bool created;
1171 if (_initialization_error_table == nullptr) {
1172 _initialization_error_table = new (mtClass) InitializationErrorTable();
1173 }
1174 _initialization_error_table->put_if_absent(this, elem, &created);
1175 assert(created, "Initialization is single threaded");
1176 log_trace(class, init)("Initialization error added for class %s", external_name());
1177 }
1178
1179 oop InstanceKlass::get_initialization_error(JavaThread* current) {
1180 MutexLocker ml(current, ClassInitError_lock);
1181 if (_initialization_error_table == nullptr) {
1182 return nullptr;
1183 }
1184 OopHandle* h = _initialization_error_table->get(this);
1185 return (h != nullptr) ? h->resolve() : nullptr;
1186 }
1187
1188 // Need to remove entries for unloaded classes.
1189 void InstanceKlass::clean_initialization_error_table() {
1190 struct InitErrorTableCleaner {
1191 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1192 if (!ik->is_loader_alive()) {
1193 h.release(Universe::vm_global());
1194 return true;
1195 } else {
1196 return false;
1197 }
1198 }
1199 };
1200
1201 assert_locked_or_safepoint(ClassInitError_lock);
1202 InitErrorTableCleaner cleaner;
1203 if (_initialization_error_table != nullptr) {
1204 _initialization_error_table->unlink(&cleaner);
1205 }
1206 }
1207
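// RAII helper that records which class this thread is waiting to see initialized;
// cleared again on destruction.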
1208 class ThreadWaitingForClassInit : public StackObj {
1209 JavaThread* _thread;
1210 public:
1211 ThreadWaitingForClassInit(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
1212 _thread->set_class_to_be_initialized(ik);
1213 }
1214 ~ThreadWaitingForClassInit() {
1215 _thread->set_class_to_be_initialized(nullptr);
1216 }
1217 };
1218
1219 void InstanceKlass::initialize_impl(TRAPS) {
1220 HandleMark hm(THREAD);
1221
1222 // Make sure klass is linked (verified) before initialization
1223 // A class could already be verified, since it has been reflected upon.
1224 link_class(CHECK);
1225
1226 DTRACE_CLASSINIT_PROBE(required, -1);
1227
1228 bool wait = false;
1229
1230 JavaThread* jt = THREAD;
1231
1232 bool debug_logging_enabled = log_is_enabled(Debug, class, init);
1233
  // Refer to the JVM book, page 47 (JVMS 5.5, "Initialization", in current editions)
  // for a description of the steps.
1235 // Step 1
1236 {
1237 Handle h_init_lock(THREAD, init_lock());
1238 ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE);
1239
1240 // Step 2
    // We wait uninterruptibly here: if we used an interruptible wait() instead then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw. This would wreak havoc. See 6320309.
1244 while (is_being_initialized() && !is_reentrant_initialization(jt)) {
1245 if (debug_logging_enabled) {
1246 ResourceMark rm(jt);
1247 log_debug(class, init)("Thread \"%s\" waiting for initialization of %s by thread \"%s\"",
1248 jt->name(), external_name(), init_thread_name());
1249 }
1250 wait = true;
1251 ThreadWaitingForClassInit twcl(THREAD, this);
1252 ol.wait_uninterruptibly(CHECK_PREEMPTABLE);
1253 }
1254
1255 // Step 3
1256 if (is_being_initialized() && is_reentrant_initialization(jt)) {
1257 if (debug_logging_enabled) {
1258 ResourceMark rm(jt);
1259 log_debug(class, init)("Thread \"%s\" recursively initializing %s",
1260 jt->name(), external_name());
1261 }
1262 DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
1263 return;
1264 }
1265
1266 // Step 4
1267 if (is_initialized()) {
1268 if (debug_logging_enabled) {
1269 ResourceMark rm(jt);
1270 log_debug(class, init)("Thread \"%s\" found %s already initialized",
1271 jt->name(), external_name());
1272 }
1273 DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
1274 return;
1275 }
1276
1277 // Step 5
1278 if (is_in_error_state()) {
1279 if (debug_logging_enabled) {
1280 ResourceMark rm(jt);
1281 log_debug(class, init)("Thread \"%s\" found %s is in error state",
1282 jt->name(), external_name());
1283 }
1284
1285 DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
1286 ResourceMark rm(THREAD);
1287 Handle cause(THREAD, get_initialization_error(THREAD));
1288
1289 stringStream ss;
1290 ss.print("Could not initialize class %s", external_name());
1291 if (cause.is_null()) {
1292 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
1293 } else {
1294 THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
1295 ss.as_string(), cause);
1296 }
1297 } else {
1298
1299 // Step 6
1300 set_init_state(being_initialized);
1301 set_init_thread(jt);
1302 if (debug_logging_enabled) {
1303 ResourceMark rm(jt);
1304 log_debug(class, init)("Thread \"%s\" is initializing %s",
1305 jt->name(), external_name());
1306 }
1307 }
1308 }
1309
1310 // Block preemption once we are the initializer thread. Unmounting now
1311 // would complicate the reentrant case (identity is platform thread).
1312 NoPreemptMark npm(THREAD);
1313
1314 // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
1317 if (!is_interface()) {
1318 Klass* super_klass = super();
1319 if (super_klass != nullptr && super_klass->should_be_initialized()) {
1320 super_klass->initialize(THREAD);
1321 }
1322 // If C implements any interface that declares a non-static, concrete method,
1323 // the initialization of C triggers initialization of its super interfaces.
1324 // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
1325 // having a superinterface that declares, non-static, concrete methods
1326 if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
1327 initialize_super_interfaces(THREAD);
1328 }
1329
1330 // If any exceptions, complete abruptly, throwing the same exception as above.
1331 if (HAS_PENDING_EXCEPTION) {
1332 Handle e(THREAD, PENDING_EXCEPTION);
1333 CLEAR_PENDING_EXCEPTION;
1334 {
1335 EXCEPTION_MARK;
1336 add_initialization_error(THREAD, e);
1337 // Locks object, set state, and notify all waiting threads
1338 set_initialization_state_and_notify(initialization_error, THREAD);
1339 CLEAR_PENDING_EXCEPTION;
1340 }
1341 DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1342 THROW_OOP(e());
1343 }
1344 }
1345
1346
1347 // Step 8
1348 {
1349 DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1350 if (class_initializer() != nullptr) {
1351 // Timer includes any side effects of class initialization (resolution,
1352 // etc), but not recursive entry into call_class_initializer().
1353 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1354 ClassLoader::perf_class_init_selftime(),
1355 ClassLoader::perf_classes_inited(),
1356 jt->get_thread_stat()->perf_recursion_counts_addr(),
1357 jt->get_thread_stat()->perf_timers_addr(),
1358 PerfClassTraceTime::CLASS_CLINIT);
1359 call_class_initializer(THREAD);
1360 } else {
1361 // The elapsed time is so small it's not worth counting.
1362 if (UsePerfData) {
1363 ClassLoader::perf_classes_inited()->inc();
1364 }
1365 call_class_initializer(THREAD);
1366 }
1367 }
1368
1369 // Step 9
1370 if (!HAS_PENDING_EXCEPTION) {
1371 set_initialization_state_and_notify(fully_initialized, CHECK);
1372 DEBUG_ONLY(vtable().verify(tty, true);)
1373 CompilationPolicy::replay_training_at_init(this, THREAD);
1374 }
1375 else {
1376 // Step 10 and 11
1377 Handle e(THREAD, PENDING_EXCEPTION);
1378 CLEAR_PENDING_EXCEPTION;
1379 // JVMTI has already reported the pending exception
1380 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1381 JvmtiExport::clear_detected_exception(jt);
1382 {
1383 EXCEPTION_MARK;
1384 add_initialization_error(THREAD, e);
1385 set_initialization_state_and_notify(initialization_error, THREAD);
1386 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
1387 // JVMTI has already reported the pending exception
1388 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1389 JvmtiExport::clear_detected_exception(jt);
1390 }
1391 DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1392 if (e->is_a(vmClasses::Error_klass())) {
1393 THROW_OOP(e());
1394 } else {
1395 JavaCallArguments args(e);
1396 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
1397 vmSymbols::throwable_void_signature(),
1398 &args);
1399 }
1400 }
1401 DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1402 }
1403
1404
1405 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
1406 Handle h_init_lock(THREAD, init_lock());
1407 if (h_init_lock() != nullptr) {
1408 ObjectLocker ol(h_init_lock, THREAD);
1409 set_init_thread(nullptr); // reset _init_thread before changing _init_state
1410 set_init_state(state);
1411 fence_and_clear_init_lock();
1412 ol.notify_all(CHECK);
1413 } else {
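    // A null init lock means the class already reached a terminal state, so the
    // initialization state is being set twice; the assert below catches this.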
1414 assert(h_init_lock() != nullptr, "The initialization state should never be set twice");
1415 set_init_thread(nullptr); // reset _init_thread before changing _init_state
1416 set_init_state(state);
1417 }
1418 }
1419
// Update hierarchy. This is done before the new klass has been added to the SystemDictionary. The Compile_lock
// is grabbed to ensure that the compiler is not using the class hierarchy.
1422 void InstanceKlass::add_to_hierarchy(JavaThread* current) {
1423 assert(!SafepointSynchronize::is_at_safepoint(), "must NOT be at safepoint");
1424
1425 DeoptimizationScope deopt_scope;
1426 {
1427 MutexLocker ml(current, Compile_lock);
1428
1429 set_init_state(InstanceKlass::loaded);
1430 // make sure init_state store is already done.
1431 // The compiler reads the hierarchy outside of the Compile_lock.
1432 // Access ordering is used to add to hierarchy.
1433
1434 // Link into hierarchy.
1435 append_to_sibling_list(); // add to superklass/sibling list
1436 process_interfaces(); // handle all "implements" declarations
1437
1438 // Now mark all code that depended on old class hierarchy.
1439 // Note: must be done *after* linking k into the hierarchy (was bug 12/9/97)
1440 if (Universe::is_fully_initialized()) {
1441 CodeCache::mark_dependents_on(&deopt_scope, this);
1442 }
1443 }
1444 // Perform the deopt handshake outside Compile_lock.
1445 deopt_scope.deoptimize_marked();
1446 }
1447
1448
1449 InstanceKlass* InstanceKlass::implementor() const {
1450 InstanceKlass* volatile* ik = adr_implementor();
1451 if (ik == nullptr) {
1452 return nullptr;
1453 } else {
1454 // This load races with inserts, and therefore needs acquire.
1455 InstanceKlass* ikls = AtomicAccess::load_acquire(ik);
1456 if (ikls != nullptr && !ikls->is_loader_alive()) {
1457 return nullptr; // don't return unloaded class
1458 } else {
1459 return ikls;
1460 }
1461 }
1462 }
1463
1464
1465 void InstanceKlass::set_implementor(InstanceKlass* ik) {
1466 assert_locked_or_safepoint(Compile_lock);
1467 assert(is_interface(), "not interface");
1468 InstanceKlass* volatile* addr = adr_implementor();
1469 assert(addr != nullptr, "null addr");
1470 if (addr != nullptr) {
1471 AtomicAccess::release_store(addr, ik);
1472 }
1473 }
1474
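// Returns 0 when the interface has no implementor, 1 when it has exactly one, and 2
// as shorthand for "two or more" (see the _implementor encoding described below).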
1475 int InstanceKlass::nof_implementors() const {
1476 InstanceKlass* ik = implementor();
1477 if (ik == nullptr) {
1478 return 0;
1479 } else if (ik != this) {
1480 return 1;
1481 } else {
1482 return 2;
1483 }
1484 }
1485
1486 // The embedded _implementor field can only record one implementor.
// When there is more than one implementor, the _implementor field
1488 // is set to the interface Klass* itself. Following are the possible
1489 // values for the _implementor field:
1490 // null - no implementor
1491 // implementor Klass* - one implementor
1492 // self - more than one implementor
1493 //
1494 // The _implementor field only exists for interfaces.
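// For example, as classes that implement interface I are loaded:
//   initially                    I->_implementor == nullptr    (no implementor)
//   class A implements I         I->_implementor == A          (one implementor)
//   class B also implements I    I->_implementor == I (self)   (more than one)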
1495 void InstanceKlass::add_implementor(InstanceKlass* ik) {
1496 if (Universe::is_fully_initialized()) {
1497 assert_lock_strong(Compile_lock);
1498 }
1499 assert(is_interface(), "not interface");
1500 // Filter out my subinterfaces.
1501 // (Note: Interfaces are never on the subklass list.)
1502 if (ik->is_interface()) return;
1503
1504 // Filter out subclasses whose supers already implement me.
1505 // (Note: CHA must walk subclasses of direct implementors
1506 // in order to locate indirect implementors.)
1507 InstanceKlass* super_ik = ik->super();
1508 if (super_ik != nullptr && super_ik->implements_interface(this))
1509 // We only need to check one immediate superclass, since the
1510 // implements_interface query looks at transitive_interfaces.
1511 // Any supers of the super have the same (or fewer) transitive_interfaces.
1512 return;
1513
1514 InstanceKlass* iklass = implementor();
1515 if (iklass == nullptr) {
1516 set_implementor(ik);
1517 } else if (iklass != this && iklass != ik) {
// There is already a different implementor. Use the interface itself as an
// indicator of more than one implementor.
1520 set_implementor(this);
1521 }
1522
1523 // The implementor also implements the transitive_interfaces
1524 for (int index = 0; index < local_interfaces()->length(); index++) {
1525 local_interfaces()->at(index)->add_implementor(ik);
1526 }
1527 }
1528
1529 void InstanceKlass::init_implementor() {
1530 if (is_interface()) {
1531 set_implementor(nullptr);
1532 }
1533 }
1534
1535
1536 void InstanceKlass::process_interfaces() {
1537 // link this class into the implementors list of every interface it implements
1538 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1539 assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1540 InstanceKlass* interf = local_interfaces()->at(i);
1541 assert(interf->is_interface(), "expected interface");
1542 interf->add_implementor(this);
1543 }
1544 }
1545
1546 bool InstanceKlass::can_be_primary_super_slow() const {
1547 if (is_interface())
1548 return false;
1549 else
1550 return Klass::can_be_primary_super_slow();
1551 }
1552
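// Compute the secondary supers (the transitively implemented interfaces) for this class.
// Returns nullptr when the list could be installed directly (the shared empty array, or
// the transitive interface list reused as-is); otherwise returns a growable copy of the
// interfaces so the caller can add the extra slots and install the finished list.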
1553 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1554 Array<InstanceKlass*>* transitive_interfaces) {
1555 // The secondaries are the implemented interfaces.
1556 // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1557 // (but it's safe to do here because we won't write into _secondary_supers from this point on).
1558 Array<Klass*>* interfaces = (Array<Klass*>*)(address)transitive_interfaces;
1559 int num_secondaries = num_extra_slots + interfaces->length();
1560 if (num_secondaries == 0) {
1561 // Must share this for correct bootstrapping!
1562 set_secondary_supers(Universe::the_empty_klass_array(), Universe::the_empty_klass_bitmap());
1563 return nullptr;
1564 } else if (num_extra_slots == 0 && interfaces->length() <= 1) {
1565 // We will reuse the transitive interfaces list if we're certain
1566 // it's in hash order.
1567 uintx bitmap = compute_secondary_supers_bitmap(interfaces);
1568 set_secondary_supers(interfaces, bitmap);
1569 return nullptr;
1570 }
1571 // Copy transitive interfaces to a temporary growable array to be constructed
1572 // into the secondary super list with extra slots.
1573 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1574 for (int i = 0; i < interfaces->length(); i++) {
1575 secondaries->push(interfaces->at(i));
1576 }
1577 return secondaries;
1578 }
1579
1580 bool InstanceKlass::implements_interface(Klass* k) const {
1581 if (this == k) return true;
1582 assert(k->is_interface(), "should be an interface class");
1583 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1584 if (transitive_interfaces()->at(i) == k) {
1585 return true;
1586 }
1587 }
1588 return false;
1589 }
1590
1591 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1592 // Verify direct super interface
1593 if (this == k) return true;
1594 assert(k->is_interface(), "should be an interface class");
1595 for (int i = 0; i < local_interfaces()->length(); i++) {
1596 if (local_interfaces()->at(i) == k) {
1597 return true;
1598 }
1599 }
1600 return false;
1601 }
1602
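// Register the instance for finalization by calling into Java through the method
// cached in Universe::finalizer_register_method() (java.lang.ref.Finalizer.register).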
1603 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
1604 if (TraceFinalizerRegistration) {
1605 tty->print("Registered ");
1606 i->print_value_on(tty);
1607 tty->print_cr(" (" PTR_FORMAT ") as finalizable", p2i(i));
1608 }
1609 instanceHandle h_i(THREAD, i);
// Pass the handle as an argument; JavaCalls::call expects oops as jobjects
1611 JavaValue result(T_VOID);
1612 JavaCallArguments args(h_i);
1613 methodHandle mh(THREAD, Universe::finalizer_register_method());
1614 JavaCalls::call(&result, mh, &args, CHECK_NULL);
1615 MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)
1616 return h_i();
1617 }
1618
1619 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1620 assert(!is_abstract() && !is_interface(), "Should not create this object");
1621 size_t size = size_helper(); // Query before forming handle.
1622 return (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1623 }
1624
1625 instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
1626 Klass* k = java_lang_Class::as_Klass(java_class);
1627 if (k == nullptr) {
1628 ResourceMark rm(THREAD);
1629 THROW_(vmSymbols::java_lang_InstantiationException(), nullptr);
1630 }
1631 InstanceKlass* ik = cast(k);
1632 ik->check_valid_for_instantiation(false, CHECK_NULL);
1633 ik->initialize(CHECK_NULL);
1634 return ik->allocate_instance(THREAD);
1635 }
1636
1637 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1638 return instanceHandle(THREAD, allocate_instance(THREAD));
1639 }
1640
1641 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1642 if (is_interface() || is_abstract()) {
1643 ResourceMark rm(THREAD);
1644 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1645 : vmSymbols::java_lang_InstantiationException(), external_name());
1646 }
1647 if (this == vmClasses::Class_klass()) {
1648 ResourceMark rm(THREAD);
1649 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1650 : vmSymbols::java_lang_IllegalAccessException(), external_name());
1651 }
1652 }
1653
1654 ArrayKlass* InstanceKlass::array_klass(int n, TRAPS) {
1655 // Need load-acquire for lock-free read
1656 if (array_klasses_acquire() == nullptr) {
1657
1658 // Recursively lock array allocation
1659 RecursiveLocker rl(MultiArray_lock, THREAD);
1660
1661 // Check if another thread created the array klass while we were waiting for the lock.
1662 if (array_klasses() == nullptr) {
1663 ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
1664 // use 'release' to pair with lock-free load
1665 release_set_array_klasses(k);
1666 }
1667 }
1668
1669 // array_klasses() will always be set at this point
1670 ObjArrayKlass* ak = array_klasses();
1671 assert(ak != nullptr, "should be set");
1672 return ak->array_klass(n, THREAD);
1673 }
1674
1675 ArrayKlass* InstanceKlass::array_klass_or_null(int n) {
1676 // Need load-acquire for lock-free read
1677 ObjArrayKlass* oak = array_klasses_acquire();
1678 if (oak == nullptr) {
1679 return nullptr;
1680 } else {
1681 return oak->array_klass_or_null(n);
1682 }
1683 }
1684
1685 ArrayKlass* InstanceKlass::array_klass(TRAPS) {
1686 return array_klass(1, THREAD);
1687 }
1688
1689 ArrayKlass* InstanceKlass::array_klass_or_null() {
1690 return array_klass_or_null(1);
1691 }
1692
1693 static int call_class_initializer_counter = 0; // for debugging
1694
1695 Method* InstanceKlass::class_initializer() const {
1696 Method* clinit = find_method(
1697 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1698 if (clinit != nullptr && clinit->has_valid_initializer_flags()) {
1699 return clinit;
1700 }
1701 return nullptr;
1702 }
1703
1704 void InstanceKlass::call_class_initializer(TRAPS) {
1705 if (ReplayCompiles &&
1706 (ReplaySuppressInitializers == 1 ||
1707 (ReplaySuppressInitializers >= 2 && class_loader() != nullptr))) {
1708 // Hide the existence of the initializer for the purpose of replaying the compile
1709 return;
1710 }
1711
1712 #if INCLUDE_CDS
1713 // This is needed to ensure the consistency of the archived heap objects.
1714 if (has_aot_initialized_mirror() && CDSConfig::is_loading_heap()) {
1715 AOTClassInitializer::call_runtime_setup(THREAD, this);
1716 return;
1717 } else if (has_archived_enum_objs()) {
1718 assert(in_aot_cache(), "must be");
1719 bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK);
1720 if (initialized) {
1721 return;
1722 }
1723 }
1724 #endif
1725
1726 methodHandle h_method(THREAD, class_initializer());
1727 assert(!is_initialized(), "we cannot initialize twice");
1728 LogTarget(Info, class, init) lt;
1729 if (lt.is_enabled()) {
1730 ResourceMark rm(THREAD);
1731 LogStream ls(lt);
1732 ls.print("%d Initializing ", call_class_initializer_counter++);
1733 name()->print_value_on(&ls);
1734 ls.print_cr("%s (" PTR_FORMAT ") by thread \"%s\"",
1735 h_method() == nullptr ? "(no method)" : "", p2i(this),
1736 THREAD->name());
1737 }
1738 if (h_method() != nullptr) {
1739 ThreadInClassInitializer ticl(THREAD, this); // Track class being initialized
1740 JavaCallArguments args; // No arguments
1741 JavaValue result(T_VOID);
1742 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1743 }
1744 }
1745
1746 // If a class that implements this interface is initialized, is the JVM required
1747 // to first execute a <clinit> method declared in this interface,
1748 // or (if also_check_supers==true) any of the super types of this interface?
1749 //
1750 // JVMS 5.5. Initialization, step 7: Next, if C is a class rather than
1751 // an interface, then let SC be its superclass and let SI1, ..., SIn
1752 // be all superinterfaces of C (whether direct or indirect) that
1753 // declare at least one non-abstract, non-static method.
1754 //
1755 // So when an interface is initialized, it does not look at its
1756 // supers. But a proper class will ensure that all of its supers have
1757 // run their <clinit> methods, except that it disregards interfaces
1758 // that lack a non-static concrete method (i.e., a default method).
1759 // Therefore, you should probably call this method only when the
1760 // current class is a super of some proper class, not an interface.
1761 bool InstanceKlass::interface_needs_clinit_execution_as_super(bool also_check_supers) const {
1762 assert(is_interface(), "must be");
1763
1764 if (!has_nonstatic_concrete_methods()) {
1765 // quick check: no nonstatic concrete methods are declared by this or any super interfaces
1766 return false;
1767 }
1768
1769 // JVMS 5.5. Initialization
1770 // ...If C is an interface that declares a non-abstract,
1771 // non-static method, the initialization of a class that
1772 // implements C directly or indirectly.
1773 if (declares_nonstatic_concrete_methods() && class_initializer() != nullptr) {
1774 return true;
1775 }
1776 if (also_check_supers) {
1777 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
1778 for (int i = 0; i < all_ifs->length(); ++i) {
1779 InstanceKlass* super_intf = all_ifs->at(i);
1780 if (super_intf->declares_nonstatic_concrete_methods() && super_intf->class_initializer() != nullptr) {
1781 return true;
1782 }
1783 }
1784 }
1785 return false;
1786 }
1787
1788 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1789 InterpreterOopMap* entry_for) {
1790 // Lazily create the _oop_map_cache at first request.
1791 // Load_acquire is needed to safely get instance published with CAS by another thread.
1792 OopMapCache* oop_map_cache = AtomicAccess::load_acquire(&_oop_map_cache);
1793 if (oop_map_cache == nullptr) {
1794 // Try to install new instance atomically.
1795 oop_map_cache = new OopMapCache();
1796 OopMapCache* other = AtomicAccess::cmpxchg(&_oop_map_cache, (OopMapCache*)nullptr, oop_map_cache);
1797 if (other != nullptr) {
1798 // Someone else managed to install before us, ditch local copy and use the existing one.
1799 delete oop_map_cache;
1800 oop_map_cache = other;
1801 }
1802 }
1803 // _oop_map_cache is constant after init; lookup below does its own locking.
1804 oop_map_cache->lookup(method, bci, entry_for);
1805 }
1806
1807 bool InstanceKlass::contains_field_offset(int offset) {
1808 fieldDescriptor fd;
1809 return find_field_from_offset(offset, false, &fd);
1810 }
1811
1812 FieldInfo InstanceKlass::field(int index) const {
1813 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
1814 if (fs.index() == index) {
1815 return fs.to_FieldInfo();
1816 }
1817 }
1818 fatal("Field not found");
1819 return FieldInfo();
1820 }
1821
1822 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1823 JavaFieldStream fs(this);
1824 if (fs.lookup(name, sig)) {
1825 assert(fs.name() == name, "name must match");
1826 assert(fs.signature() == sig, "signature must match");
1827 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
1828 return true;
1829 }
1830 return false;
1831 }
1832
1833
1834 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1835 const int n = local_interfaces()->length();
1836 for (int i = 0; i < n; i++) {
1837 InstanceKlass* intf1 = local_interfaces()->at(i);
1838 assert(intf1->is_interface(), "just checking type");
1839 // search for field in current interface
1840 if (intf1->find_local_field(name, sig, fd)) {
1841 assert(fd->is_static(), "interface field must be static");
1842 return intf1;
1843 }
1844 // search for field in direct superinterfaces
1845 Klass* intf2 = intf1->find_interface_field(name, sig, fd);
1846 if (intf2 != nullptr) return intf2;
1847 }
1848 // otherwise field lookup fails
1849 return nullptr;
1850 }
1851
1852
1853 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1854 // search order according to newest JVM spec (5.4.3.2, p.167).
1855 // 1) search for field in current klass
1856 if (find_local_field(name, sig, fd)) {
1857 return const_cast<InstanceKlass*>(this);
1858 }
1859 // 2) search for field recursively in direct superinterfaces
1860 { Klass* intf = find_interface_field(name, sig, fd);
1861 if (intf != nullptr) return intf;
1862 }
1863 // 3) apply field lookup recursively if superclass exists
1864 { InstanceKlass* supr = super();
1865 if (supr != nullptr) return supr->find_field(name, sig, fd);
1866 }
1867 // 4) otherwise field lookup fails
1868 return nullptr;
1869 }
1870
1871
1872 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1873 // search order according to newest JVM spec (5.4.3.2, p.167).
1874 // 1) search for field in current klass
1875 if (find_local_field(name, sig, fd)) {
1876 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1877 }
1878 // 2) search for field recursively in direct superinterfaces
1879 if (is_static) {
1880 Klass* intf = find_interface_field(name, sig, fd);
1881 if (intf != nullptr) return intf;
1882 }
1883 // 3) apply field lookup recursively if superclass exists
1884 { InstanceKlass* supr = super();
1885 if (supr != nullptr) return supr->find_field(name, sig, is_static, fd);
1886 }
1887 // 4) otherwise field lookup fails
1888 return nullptr;
1889 }
1890
1891
1892 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1893 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1894 if (fs.offset() == offset) {
1895 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
1896 if (fd->is_static() == is_static) return true;
1897 }
1898 }
1899 return false;
1900 }
1901
1902
1903 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1904 const InstanceKlass* klass = this;
1905 while (klass != nullptr) {
1906 if (klass->find_local_field_from_offset(offset, is_static, fd)) {
1907 return true;
1908 }
1909 klass = klass->super();
1910 }
1911 return false;
1912 }
1913
1914
1915 void InstanceKlass::methods_do(void f(Method* method)) {
// Methods aren't stable until they are loaded. This can be read outside
// a lock through the ClassLoaderData for profiling.
// Redefined scratch classes are on the list and need to be cleaned.
1919 if (!is_loaded() && !is_scratch_class()) {
1920 return;
1921 }
1922
1923 int len = methods()->length();
1924 for (int index = 0; index < len; index++) {
1925 Method* m = methods()->at(index);
1926 assert(m->is_method(), "must be method");
1927 f(m);
1928 }
1929 }
1930
1931
1932 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1933 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1934 if (fs.access_flags().is_static()) {
1935 fieldDescriptor& fd = fs.field_descriptor();
1936 cl->do_field(&fd);
1937 }
1938 }
1939 }
1940
1941
1942 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
1943 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1944 if (fs.access_flags().is_static()) {
1945 fieldDescriptor& fd = fs.field_descriptor();
1946 f(&fd, mirror, CHECK);
1947 }
1948 }
1949 }
1950
1951 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1952 InstanceKlass* super = this->super();
1953 if (super != nullptr) {
1954 super->do_nonstatic_fields(cl);
1955 }
1956 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1957 fieldDescriptor& fd = fs.field_descriptor();
1958 if (!fd.is_static()) {
1959 cl->do_field(&fd);
1960 }
1961 }
1962 }
1963
1964 static int compare_fields_by_offset(FieldInfo* a, FieldInfo* b) {
1965 return a->offset() - b->offset();
1966 }
1967
1968 void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) {
1969 InstanceKlass* super = this->super();
1970 if (super != nullptr) {
1971 super->print_nonstatic_fields(cl);
1972 }
1973 ResourceMark rm;
1974 // In DebugInfo nonstatic fields are sorted by offset.
1975 GrowableArray<FieldInfo> fields_sorted;
1976 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
1977 if (!fs.access_flags().is_static()) {
1978 fields_sorted.push(fs.to_FieldInfo());
1979 }
1980 }
1981 int length = fields_sorted.length();
1982 if (length > 0) {
1983 fields_sorted.sort(compare_fields_by_offset);
1984 fieldDescriptor fd;
1985 for (int i = 0; i < length; i++) {
1986 fd.reinitialize(this, fields_sorted.at(i));
1987 assert(!fd.is_static() && fd.offset() == checked_cast<int>(fields_sorted.at(i).offset()), "only nonstatic fields");
1988 cl->do_field(&fd);
1989 }
1990 }
1991 }
1992
1993 #ifdef ASSERT
1994 static int linear_search(const Array<Method*>* methods,
1995 const Symbol* name,
1996 const Symbol* signature) {
1997 const int len = methods->length();
1998 for (int index = 0; index < len; index++) {
1999 const Method* const m = methods->at(index);
2000 assert(m->is_method(), "must be method");
2001 if (m->signature() == signature && m->name() == name) {
2002 return index;
2003 }
2004 }
2005 return -1;
2006 }
2007 #endif
2008
2009 bool InstanceKlass::_disable_method_binary_search = false;
2010
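// Plain linear scan by name over the methods array. Used by quick_search() below when
// binary search has been disabled, i.e. when the array is no longer guaranteed to be
// sorted by the addresses of the name Symbols (late in dynamic CDS dumping).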
2011 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
2012 int len = methods->length();
2013 int l = 0;
2014 int h = len - 1;
2015 while (l <= h) {
2016 Method* m = methods->at(l);
2017 if (m->name() == name) {
2018 return l;
2019 }
2020 l++;
2021 }
2022 return -1;
2023 }
2024
2025 inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
2026 if (_disable_method_binary_search) {
2027 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
2028 // At the final stage of dynamic dumping, the methods array may not be sorted
2029 // by ascending addresses of their names, so we can't use binary search anymore.
2030 // However, methods with the same name are still laid out consecutively inside the
2031 // methods array, so let's look for the first one that matches.
2032 return linear_search(methods, name);
2033 }
2034
2035 int len = methods->length();
2036 int l = 0;
2037 int h = len - 1;
2038
2039 // methods are sorted by ascending addresses of their names, so do binary search
2040 while (l <= h) {
2041 int mid = (l + h) >> 1;
2042 Method* m = methods->at(mid);
2043 assert(m->is_method(), "must be method");
2044 int res = m->name()->fast_compare(name);
2045 if (res == 0) {
2046 return mid;
2047 } else if (res < 0) {
2048 l = mid + 1;
2049 } else {
2050 h = mid - 1;
2051 }
2052 }
2053 return -1;
2054 }
2055
2056 // find_method looks up the name/signature in the local methods array
2057 Method* InstanceKlass::find_method(const Symbol* name,
2058 const Symbol* signature) const {
2059 return find_method_impl(name, signature,
2060 OverpassLookupMode::find,
2061 StaticLookupMode::find,
2062 PrivateLookupMode::find);
2063 }
2064
2065 Method* InstanceKlass::find_method_impl(const Symbol* name,
2066 const Symbol* signature,
2067 OverpassLookupMode overpass_mode,
2068 StaticLookupMode static_mode,
2069 PrivateLookupMode private_mode) const {
2070 return InstanceKlass::find_method_impl(methods(),
2071 name,
2072 signature,
2073 overpass_mode,
2074 static_mode,
2075 private_mode);
2076 }
2077
2078 // find_instance_method looks up the name/signature in the local methods array
2079 // and skips over static methods
2080 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
2081 const Symbol* name,
2082 const Symbol* signature,
2083 PrivateLookupMode private_mode) {
2084 Method* const meth = InstanceKlass::find_method_impl(methods,
2085 name,
2086 signature,
2087 OverpassLookupMode::find,
2088 StaticLookupMode::skip,
2089 private_mode);
2090 assert(((meth == nullptr) || !meth->is_static()),
2091 "find_instance_method should have skipped statics");
2092 return meth;
2093 }
2094
2095 // find_instance_method looks up the name/signature in the local methods array
2096 // and skips over static methods
2097 Method* InstanceKlass::find_instance_method(const Symbol* name,
2098 const Symbol* signature,
2099 PrivateLookupMode private_mode) const {
2100 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
2101 }
2102
2103 // Find looks up the name/signature in the local methods array
2104 // and filters on the overpass, static and private flags
2105 // This returns the first one found
2106 // note that the local methods array can have up to one overpass, one static
2107 // and one instance (private or not) with the same name/signature
2108 Method* InstanceKlass::find_local_method(const Symbol* name,
2109 const Symbol* signature,
2110 OverpassLookupMode overpass_mode,
2111 StaticLookupMode static_mode,
2112 PrivateLookupMode private_mode) const {
2113 return InstanceKlass::find_method_impl(methods(),
2114 name,
2115 signature,
2116 overpass_mode,
2117 static_mode,
2118 private_mode);
2119 }
2120
2121 // Find looks up the name/signature in the local methods array
2122 // and filters on the overpass, static and private flags
2123 // This returns the first one found
2124 // note that the local methods array can have up to one overpass, one static
2125 // and one instance (private or not) with the same name/signature
2126 Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
2127 const Symbol* name,
2128 const Symbol* signature,
2129 OverpassLookupMode overpass_mode,
2130 StaticLookupMode static_mode,
2131 PrivateLookupMode private_mode) {
2132 return InstanceKlass::find_method_impl(methods,
2133 name,
2134 signature,
2135 overpass_mode,
2136 static_mode,
2137 private_mode);
2138 }
2139
2140 Method* InstanceKlass::find_method(const Array<Method*>* methods,
2141 const Symbol* name,
2142 const Symbol* signature) {
2143 return InstanceKlass::find_method_impl(methods,
2144 name,
2145 signature,
2146 OverpassLookupMode::find,
2147 StaticLookupMode::find,
2148 PrivateLookupMode::find);
2149 }
2150
2151 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
2152 const Symbol* name,
2153 const Symbol* signature,
2154 OverpassLookupMode overpass_mode,
2155 StaticLookupMode static_mode,
2156 PrivateLookupMode private_mode) {
2157 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
return hit >= 0 ? methods->at(hit) : nullptr;
2159 }
2160
2161 // true if method matches signature and conforms to skipping_X conditions.
2162 static bool method_matches(const Method* m,
2163 const Symbol* signature,
2164 bool skipping_overpass,
2165 bool skipping_static,
2166 bool skipping_private) {
2167 return ((m->signature() == signature) &&
2168 (!skipping_overpass || !m->is_overpass()) &&
2169 (!skipping_static || !m->is_static()) &&
2170 (!skipping_private || !m->is_private()));
2171 }
2172
// Used directly for default_methods to find the index into the
// default_vtable_indices, and indirectly by find_method.
// find_method_index looks in the local methods array to return the index
// of the matching name/signature. If overpass methods are being ignored,
// the search continues to find a potential non-overpass match. This capability
// is important during method resolution to prefer a static method, for example,
// over an overpass method.
// Any _methods array may contain the same name/signature for a static method,
// an overpass method, and a local instance method.
// To correctly catch a given method, the search criteria may need
// to explicitly skip the other two. For local instance methods, it
// is often necessary to skip private methods.
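// For example, if the name-sorted methods array contains
//   ... | foo()V (static) | foo()V (overpass) | foo(I)V | ...
// quick_search() may land on any of the "foo" entries; the downward and upward scans
// below then walk the neighboring same-name entries until a match is found or the
// name changes.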
2185 int InstanceKlass::find_method_index(const Array<Method*>* methods,
2186 const Symbol* name,
2187 const Symbol* signature,
2188 OverpassLookupMode overpass_mode,
2189 StaticLookupMode static_mode,
2190 PrivateLookupMode private_mode) {
2191 const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
2192 const bool skipping_static = (static_mode == StaticLookupMode::skip);
2193 const bool skipping_private = (private_mode == PrivateLookupMode::skip);
2194 const int hit = quick_search(methods, name);
2195 if (hit != -1) {
2196 const Method* const m = methods->at(hit);
2197
// Do a linear search to find a matching signature. First, a quick check
// for the common case, ignoring overpasses if requested.
2200 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2201 return hit;
2202 }
2203
2204 // search downwards through overloaded methods
2205 int i;
2206 for (i = hit - 1; i >= 0; --i) {
2207 const Method* const m = methods->at(i);
2208 assert(m->is_method(), "must be method");
2209 if (m->name() != name) {
2210 break;
2211 }
2212 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2213 return i;
2214 }
2215 }
2216 // search upwards
2217 for (i = hit + 1; i < methods->length(); ++i) {
2218 const Method* const m = methods->at(i);
2219 assert(m->is_method(), "must be method");
2220 if (m->name() != name) {
2221 break;
2222 }
2223 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2224 return i;
2225 }
2226 }
2227 // not found
2228 #ifdef ASSERT
2229 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
2230 linear_search(methods, name, signature);
2231 assert(-1 == index, "binary search should have found entry %d", index);
2232 #endif
2233 }
2234 return -1;
2235 }
2236
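// Returns the index of the first method with the given name and stores one past the
// last such method in *end_ptr (methods with the same name are laid out consecutively
// in the sorted methods array). Returns -1 if no method has that name.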
2237 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
2238 return find_method_by_name(methods(), name, end);
2239 }
2240
2241 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
2242 const Symbol* name,
2243 int* end_ptr) {
2244 assert(end_ptr != nullptr, "just checking");
2245 int start = quick_search(methods, name);
2246 int end = start + 1;
2247 if (start != -1) {
2248 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
2249 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
2250 *end_ptr = end;
2251 return start;
2252 }
2253 return -1;
2254 }
2255
2256 // uncached_lookup_method searches both the local class methods array and all
2257 // superclasses methods arrays, skipping any overpass methods in superclasses,
2258 // and possibly skipping private methods.
2259 Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
2260 const Symbol* signature,
2261 OverpassLookupMode overpass_mode,
2262 PrivateLookupMode private_mode) const {
2263 OverpassLookupMode overpass_local_mode = overpass_mode;
2264 const InstanceKlass* klass = this;
2265 while (klass != nullptr) {
2266 Method* const method = klass->find_method_impl(name,
2267 signature,
2268 overpass_local_mode,
2269 StaticLookupMode::find,
2270 private_mode);
2271 if (method != nullptr) {
2272 return method;
2273 }
2274 klass = klass->super();
2275 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
2276 }
2277 return nullptr;
2278 }
2279
2280 #ifdef ASSERT
2281 // search through class hierarchy and return true if this class or
2282 // one of the superclasses was redefined
2283 bool InstanceKlass::has_redefined_this_or_super() const {
2284 const InstanceKlass* klass = this;
2285 while (klass != nullptr) {
2286 if (klass->has_been_redefined()) {
2287 return true;
2288 }
2289 klass = klass->super();
2290 }
2291 return false;
2292 }
2293 #endif
2294
2295 // lookup a method in the default methods list then in all transitive interfaces
2296 // Do NOT return private or static methods
2297 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2298 Symbol* signature) const {
2299 Method* m = nullptr;
2300 if (default_methods() != nullptr) {
2301 m = find_method(default_methods(), name, signature);
2302 }
2303 // Look up interfaces
2304 if (m == nullptr) {
2305 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2306 }
2307 return m;
2308 }
2309
2310 // lookup a method in all the interfaces that this class implements
// Do NOT return private or static methods (new in JDK 8), which are not externally visible.
// They should only be found in the initial InterfaceMethodRef.
2313 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2314 Symbol* signature,
2315 DefaultsLookupMode defaults_mode) const {
2316 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2317 int num_ifs = all_ifs->length();
2318 InstanceKlass *ik = nullptr;
2319 for (int i = 0; i < num_ifs; i++) {
2320 ik = all_ifs->at(i);
2321 Method* m = ik->lookup_method(name, signature);
2322 if (m != nullptr && m->is_public() && !m->is_static() &&
2323 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2324 return m;
2325 }
2326 }
2327 return nullptr;
2328 }
2329
2330 PrintClassClosure::PrintClassClosure(outputStream* st, bool verbose)
2331 :_st(st), _verbose(verbose) {
2332 ResourceMark rm;
2333 _st->print("%-18s ", "KlassAddr");
2334 _st->print("%-4s ", "Size");
2335 _st->print("%-20s ", "State");
2336 _st->print("%-7s ", "Flags");
2337 _st->print("%-5s ", "ClassName");
2338 _st->cr();
2339 }
2340
2341 void PrintClassClosure::do_klass(Klass* k) {
2342 ResourceMark rm;
2343 // klass pointer
2344 _st->print(PTR_FORMAT " ", p2i(k));
2345 // klass size
2346 _st->print("%4d ", k->size());
2347 // initialization state
2348 if (k->is_instance_klass()) {
2349 _st->print("%-20s ",InstanceKlass::cast(k)->init_state_name());
2350 } else {
2351 _st->print("%-20s ","");
2352 }
// misc flags (changes should be kept in sync with the ClassesDCmd::ClassesDCmd help doc)
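// Legend: F = has finalizer, f = has final method, W = rewritten, C = contended,
//         R = has been redefined, S = in the AOT cache.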
2354 char buf[10];
2355 int i = 0;
2356 if (k->has_finalizer()) buf[i++] = 'F';
2357 if (k->is_instance_klass()) {
2358 InstanceKlass* ik = InstanceKlass::cast(k);
2359 if (ik->has_final_method()) buf[i++] = 'f';
2360 if (ik->is_rewritten()) buf[i++] = 'W';
2361 if (ik->is_contended()) buf[i++] = 'C';
2362 if (ik->has_been_redefined()) buf[i++] = 'R';
2363 if (ik->in_aot_cache()) buf[i++] = 'S';
2364 }
2365 buf[i++] = '\0';
2366 _st->print("%-7s ", buf);
2367 // klass name
2368 _st->print("%-5s ", k->external_name());
2369 // end
2370 _st->cr();
2371 if (_verbose) {
2372 k->print_on(_st);
2373 }
2374 }
2375
/* jni_id_for is used for jfieldIDs only */
2377 JNIid* InstanceKlass::jni_id_for(int offset) {
2378 MutexLocker ml(JfieldIdCreation_lock);
2379 JNIid* probe = jni_ids() == nullptr ? nullptr : jni_ids()->find(offset);
2380 if (probe == nullptr) {
2381 // Allocate new static field identifier
2382 probe = new JNIid(this, offset, jni_ids());
2383 set_jni_ids(probe);
2384 }
2385 return probe;
2386 }
2387
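// The _inner_classes array holds the InnerClasses entries as consecutive records of
// inner_class_next_offset u2 values; if the class also has an EnclosingMethod attribute,
// two extra u2 slots (class index and method index) are appended at the end. A length
// that is an exact multiple of the record size therefore means there is no
// enclosing-method data.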
2388 u2 InstanceKlass::enclosing_method_data(int offset) const {
2389 const Array<jushort>* const inner_class_list = inner_classes();
2390 if (inner_class_list == nullptr) {
2391 return 0;
2392 }
2393 const int length = inner_class_list->length();
2394 if (length % inner_class_next_offset == 0) {
2395 return 0;
2396 }
2397 const int index = length - enclosing_method_attribute_size;
2398 assert(offset < enclosing_method_attribute_size, "invalid offset");
2399 return inner_class_list->at(index + offset);
2400 }
2401
2402 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2403 u2 method_index) {
2404 Array<jushort>* inner_class_list = inner_classes();
2405 assert (inner_class_list != nullptr, "_inner_classes list is not set up");
2406 int length = inner_class_list->length();
2407 if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2408 int index = length - enclosing_method_attribute_size;
2409 inner_class_list->at_put(
2410 index + enclosing_method_class_index_offset, class_index);
2411 inner_class_list->at_put(
2412 index + enclosing_method_method_index_offset, method_index);
2413 }
2414 }
2415
2416 jmethodID InstanceKlass::update_jmethod_id(jmethodID* jmeths, Method* method, int idnum) {
2417 if (method->is_old() && !method->is_obsolete()) {
2418 // If the method passed in is old (but not obsolete), use the current version.
2419 method = method_with_idnum((int)idnum);
assert(method != nullptr, "old but not obsolete, so should exist");
2421 }
2422 jmethodID new_id = Method::make_jmethod_id(class_loader_data(), method);
2423 AtomicAccess::release_store(&jmeths[idnum + 1], new_id);
2424 return new_id;
2425 }
2426
2427 // Allocate the jmethodID cache.
2428 static jmethodID* create_jmethod_id_cache(size_t size) {
2429 jmethodID* jmeths = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass);
2430 memset(jmeths, 0, (size + 1) * sizeof(jmethodID));
2431 // cache size is stored in element[0], other elements offset by one
2432 jmeths[0] = (jmethodID)size;
2433 return jmeths;
2434 }
2435
2436 // When reading outside a lock, use this.
2437 jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
2438 return AtomicAccess::load_acquire(&_methods_jmethod_ids);
2439 }
2440
2441 void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
2442 AtomicAccess::release_store(&_methods_jmethod_ids, jmeths);
2443 }
2444
2445 // Lookup or create a jmethodID.
2446 jmethodID InstanceKlass::get_jmethod_id(Method* method) {
2447 int idnum = method->method_idnum();
2448 jmethodID* jmeths = methods_jmethod_ids_acquire();
2449
2450 // We use a double-check locking idiom here because this cache is
2451 // performance sensitive. In the normal system, this cache only
2452 // transitions from null to non-null which is safe because we use
2453 // release_set_methods_jmethod_ids() to advertise the new cache.
2454 // A partially constructed cache should never be seen by a racing
2455 // thread. We also use release_store() to save a new jmethodID
2456 // in the cache so a partially constructed jmethodID should never be
2457 // seen either. Cache reads of existing jmethodIDs proceed without a
// lock, but cache writes of a new jmethodID require uniqueness and
2459 // creation of the cache itself requires no leaks so a lock is
2460 // acquired in those two cases.
2461 //
2462 // If the RedefineClasses() API has been used, then this cache grows
2463 // in the redefinition safepoint.
2464
2465 if (jmeths == nullptr) {
2466 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2467 jmeths = _methods_jmethod_ids;
2468 // Still null?
2469 if (jmeths == nullptr) {
2470 size_t size = idnum_allocated_count();
2471 assert(size > (size_t)idnum, "should already have space");
2472 jmeths = create_jmethod_id_cache(size);
2473 jmethodID new_id = update_jmethod_id(jmeths, method, idnum);
2474
2475 // publish jmeths
2476 release_set_methods_jmethod_ids(jmeths);
2477 return new_id;
2478 }
2479 }
2480
2481 jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
2482 if (id == nullptr) {
2483 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2484 id = jmeths[idnum + 1];
2485 // Still null?
2486 if (id == nullptr) {
2487 return update_jmethod_id(jmeths, method, idnum);
2488 }
2489 }
2490 return id;
2491 }
2492
2493 void InstanceKlass::update_methods_jmethod_cache() {
2494 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2495 jmethodID* cache = _methods_jmethod_ids;
2496 if (cache != nullptr) {
2497 size_t size = idnum_allocated_count();
2498 size_t old_size = (size_t)cache[0];
2499 if (old_size < size + 1) {
2500 // Allocate a larger one and copy entries to the new one.
2501 // They've already been updated to point to new methods where applicable (i.e., not obsolete).
2502 jmethodID* new_cache = create_jmethod_id_cache(size);
2503
2504 for (int i = 1; i <= (int)old_size; i++) {
2505 new_cache[i] = cache[i];
2506 }
2507 _methods_jmethod_ids = new_cache;
2508 FREE_C_HEAP_ARRAY(jmethodID, cache);
2509 }
2510 }
2511 }
2512
// Make a jmethodID for all methods in this class. This makes getting all method
// ids much, much faster for classes with more than 8 methods, and has a
// *substantial* effect on performance with JVMTI code that loads all
// jmethodIDs for all classes.
2517 void InstanceKlass::make_methods_jmethod_ids() {
2518 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2519 jmethodID* jmeths = _methods_jmethod_ids;
2520 if (jmeths == nullptr) {
2521 jmeths = create_jmethod_id_cache(idnum_allocated_count());
2522 release_set_methods_jmethod_ids(jmeths);
2523 }
2524
2525 int length = methods()->length();
2526 for (int index = 0; index < length; index++) {
2527 Method* m = methods()->at(index);
2528 int idnum = m->method_idnum();
2529 assert(!m->is_old(), "should not have old methods or I'm confused");
2530 jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
2531 if (!m->is_overpass() && // skip overpasses
2532 id == nullptr) {
2533 id = Method::make_jmethod_id(class_loader_data(), m);
2534 AtomicAccess::release_store(&jmeths[idnum + 1], id);
2535 }
2536 }
2537 }
2538
2539 // Lookup a jmethodID, null if not found. Do no blocking, no allocations, no handles
2540 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2541 int idnum = method->method_idnum();
2542 jmethodID* jmeths = methods_jmethod_ids_acquire();
2543 return (jmeths != nullptr) ? jmeths[idnum + 1] : nullptr;
2544 }
2545
2546 inline DependencyContext InstanceKlass::dependencies() {
2547 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2548 return dep_context;
2549 }
2550
2551 void InstanceKlass::mark_dependent_nmethods(DeoptimizationScope* deopt_scope, KlassDepChange& changes) {
2552 dependencies().mark_dependent_nmethods(deopt_scope, changes);
2553 }
2554
2555 void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2556 assert_lock_strong(CodeCache_lock);
2557 dependencies().add_dependent_nmethod(nm);
2558 }
2559
2560 void InstanceKlass::clean_dependency_context() {
2561 dependencies().clean_unloading_dependents();
2562 }
2563
2564 #ifndef PRODUCT
2565 void InstanceKlass::print_dependent_nmethods(bool verbose) {
2566 dependencies().print_dependent_nmethods(verbose);
2567 }
2568
2569 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2570 return dependencies().is_dependent_nmethod(nm);
2571 }
2572 #endif //PRODUCT
2573
2574 void InstanceKlass::clean_weak_instanceklass_links() {
2575 clean_implementors_list();
2576 clean_method_data();
2577 }
2578
2579 void InstanceKlass::clean_implementors_list() {
2580 assert(is_loader_alive(), "this klass should be live");
2581 if (is_interface()) {
2582 assert (ClassUnloading, "only called for ClassUnloading");
2583 for (;;) {
2584 // Use load_acquire due to competing with inserts
2585 InstanceKlass* volatile* iklass = adr_implementor();
2586 assert(iklass != nullptr, "Klass must not be null");
2587 InstanceKlass* impl = AtomicAccess::load_acquire(iklass);
2588 if (impl != nullptr && !impl->is_loader_alive()) {
2589 // null this field, might be an unloaded instance klass or null
2590 if (AtomicAccess::cmpxchg(iklass, impl, (InstanceKlass*)nullptr) == impl) {
2591 // Successfully unlinking implementor.
2592 if (log_is_enabled(Trace, class, unload)) {
2593 ResourceMark rm;
2594 log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2595 }
2596 return;
2597 }
2598 } else {
2599 return;
2600 }
2601 }
2602 }
2603 }
2604
2605 void InstanceKlass::clean_method_data() {
2606 for (int m = 0; m < methods()->length(); m++) {
2607 MethodData* mdo = methods()->at(m)->method_data();
2608 if (mdo != nullptr) {
2609 mdo->clean_method_data(/*always_clean*/false);
2610 }
2611 }
2612 }
2613
2614 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2615 Klass::metaspace_pointers_do(it);
2616
2617 if (log_is_enabled(Trace, aot)) {
2618 ResourceMark rm;
2619 log_trace(aot)("Iter(InstanceKlass): %p (%s)", this, external_name());
2620 }
2621
2622 it->push(&_annotations);
2623 it->push((Klass**)&_array_klasses);
2624 if (!is_rewritten()) {
2625 it->push(&_constants, MetaspaceClosure::_writable);
2626 } else {
2627 it->push(&_constants);
2628 }
2629 it->push(&_inner_classes);
2630 #if INCLUDE_JVMTI
2631 it->push(&_previous_versions);
2632 #endif
2633 #if INCLUDE_CDS
2634 // For "old" classes with methods containing the jsr bytecode, the _methods array will
2635 // be rewritten during runtime (see Rewriter::rewrite_jsrs()) but they cannot be safely
// checked here with BytecodeStream. All methods that can't be verified are made writable.
2637 // The length check on the _methods is necessary because classes which don't have any
2638 // methods share the Universe::_the_empty_method_array which is in the RO region.
2639 if (_methods != nullptr && _methods->length() > 0 && !can_be_verified_at_dumptime()) {
2640 // To handle jsr bytecode, new Method* maybe stored into _methods
2641 it->push(&_methods, MetaspaceClosure::_writable);
2642 } else {
2643 #endif
2644 it->push(&_methods);
2645 #if INCLUDE_CDS
2646 }
2647 #endif
2648 it->push(&_default_methods);
2649 it->push(&_local_interfaces);
2650 it->push(&_transitive_interfaces);
2651 it->push(&_method_ordering);
2652 if (!is_rewritten()) {
2653 it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2654 } else {
2655 it->push(&_default_vtable_indices);
2656 }
2657
2658 it->push(&_fieldinfo_stream);
2659 it->push(&_fieldinfo_search_table);
2660 // _fields_status might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
2661 it->push(&_fields_status, MetaspaceClosure::_writable);
2662
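// Walk the embedded itable: a block of itableOffsetEntry records (one per interface)
// is followed by the per-interface method tables. The number of offset entries is
// derived from the distance between the start of the itable and the first method
// table, which the first offset entry points at.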
2663 if (itable_length() > 0) {
2664 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2665 int method_table_offset_in_words = ioe->offset()/wordSize;
2666 int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);
2667
2668 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
2669 / itableOffsetEntry::size();
2670
2671 for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2672 if (ioe->interface_klass() != nullptr) {
2673 it->push(ioe->interface_klass_addr());
2674 itableMethodEntry* ime = ioe->first_method_entry(this);
2675 int n = klassItable::method_count_for_interface(ioe->interface_klass());
2676 for (int index = 0; index < n; index ++) {
2677 it->push(ime[index].method_addr());
2678 }
2679 }
2680 }
2681 }
2682
2683 it->push(&_nest_host);
2684 it->push(&_nest_members);
2685 it->push(&_permitted_subclasses);
2686 it->push(&_record_components);
2687 }
2688
2689 #if INCLUDE_CDS
2690 void InstanceKlass::remove_unshareable_info() {
2691
2692 if (is_linked()) {
2693 assert(can_be_verified_at_dumptime(), "must be");
2694 // Remember this so we can avoid walking the hierarchy at runtime.
2695 set_verified_at_dump_time();
2696 }
2697
2698 _misc_flags.set_has_init_deps_processed(false);
2699
2700 Klass::remove_unshareable_info();
2701
2702 if (SystemDictionaryShared::has_class_failed_verification(this)) {
2703 // Classes are attempted to link during dumping and may fail,
2704 // but these classes are still in the dictionary and class list in CLD.
2705 // If the class has failed verification, there is nothing else to remove.
2706 return;
2707 }
2708
// Reset to the 'allocated' state to prevent any premature access to
// a shared class at runtime while the class is still being loaded and
// restored. A class's init_state is set to 'loaded' at runtime when it's
// being added to the class hierarchy (see InstanceKlass::add_to_hierarchy()).
2713 _init_state = allocated;
2714
2715 { // Otherwise this needs to take out the Compile_lock.
2716 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2717 init_implementor();
2718 }
2719
2720 // Call remove_unshareable_info() on other objects that belong to this class, except
2721 // for constants()->remove_unshareable_info(), which is called in a separate pass in
// ArchiveBuilder::make_klasses_shareable().
2723
2724 for (int i = 0; i < methods()->length(); i++) {
2725 Method* m = methods()->at(i);
2726 m->remove_unshareable_info();
2727 }
2728
2729 // do array classes also.
2730 if (array_klasses() != nullptr) {
2731 array_klasses()->remove_unshareable_info();
2732 }
2733
2734 // These are not allocated from metaspace. They are safe to set to null.
2735 _source_debug_extension = nullptr;
2736 _dep_context = nullptr;
2737 _osr_nmethods_head = nullptr;
2738 #if INCLUDE_JVMTI
2739 _breakpoints = nullptr;
2740 _previous_versions = nullptr;
2741 _cached_class_file = nullptr;
2742 _jvmti_cached_class_field_map = nullptr;
2743 #endif
2744
2745 _init_thread = nullptr;
2746 _methods_jmethod_ids = nullptr;
2747 _jni_ids = nullptr;
2748 _oop_map_cache = nullptr;
2749 if (CDSConfig::is_dumping_method_handles() && HeapShared::is_lambda_proxy_klass(this)) {
2750 // keep _nest_host
2751 } else {
2752 // clear _nest_host to ensure re-load at runtime
2753 _nest_host = nullptr;
2754 }
2755 init_shared_package_entry();
2756 _dep_context_last_cleaned = 0;
2757 DEBUG_ONLY(_shared_class_load_count = 0);
2758
2759 remove_unshareable_flags();
2760
2761 DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
2762 }
2763
2764 void InstanceKlass::remove_unshareable_flags() {
2765 // clear all the flags/stats that shouldn't be in the archived version
2766 assert(!is_scratch_class(), "must be");
2767 assert(!has_been_redefined(), "must be");
2768 #if INCLUDE_JVMTI
2769 set_is_being_redefined(false);
2770 #endif
2771 set_has_resolved_methods(false);
2772 }
2773
2774 void InstanceKlass::remove_java_mirror() {
2775 Klass::remove_java_mirror();
2776
2777 // do array classes also.
2778 if (array_klasses() != nullptr) {
2779 array_klasses()->remove_java_mirror();
2780 }
2781 }
2782
2783 void InstanceKlass::init_shared_package_entry() {
2784 assert(CDSConfig::is_dumping_archive(), "must be");
2785 #if !INCLUDE_CDS_JAVA_HEAP
2786 _package_entry = nullptr;
2787 #else
2788 if (CDSConfig::is_dumping_full_module_graph()) {
2789 if (defined_by_other_loaders()) {
2790 _package_entry = nullptr;
2791 } else {
2792 _package_entry = PackageEntry::get_archived_entry(_package_entry);
2793 }
2794 } else if (CDSConfig::is_dumping_dynamic_archive() &&
2795 CDSConfig::is_using_full_module_graph() &&
2796 AOTMetaspace::in_aot_cache(_package_entry)) {
2797 // _package_entry is an archived package in the base archive. Leave it as is.
2798 } else {
2799 _package_entry = nullptr;
2800 }
2801 ArchivePtrMarker::mark_pointer((address**)&_package_entry);
2802 #endif
2803 }
2804
2805 void InstanceKlass::compute_has_loops_flag_for_methods() {
2806 Array<Method*>* methods = this->methods();
2807 for (int index = 0; index < methods->length(); ++index) {
2808 Method* m = methods->at(index);
2809 if (!m->is_overpass()) { // work around JDK-8305771
2810 m->compute_has_loops_flag();
2811 }
2812 }
2813 }
2814
2815 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
2816 PackageEntry* pkg_entry, TRAPS) {
2817 // InstanceKlass::add_to_hierarchy() sets the init_state to loaded
2818 // before the InstanceKlass is added to the SystemDictionary. Make
2819 // sure the current state is <loaded.
2820 assert(!is_loaded(), "invalid init state");
2821 assert(!shared_loading_failed(), "Must not try to load failed class again");
2822 set_package(loader_data, pkg_entry, CHECK);
2823 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
2824
2825 Array<Method*>* methods = this->methods();
2826 int num_methods = methods->length();
2827 for (int index = 0; index < num_methods; ++index) {
2828 methods->at(index)->restore_unshareable_info(CHECK);
2829 }
2830 #if INCLUDE_JVMTI
2831 if (JvmtiExport::has_redefined_a_class()) {
2832 // Reinitialize vtable because RedefineClasses may have changed some
2833 // entries in this vtable for super classes so the CDS vtable might
2834 // point to old or obsolete entries. RedefineClasses doesn't fix up
2835 // vtables in the shared system dictionary, only the main one.
// RedefineClasses affects the itable in the same way, so fix that too.
2837 // First fix any default methods that point to a super class that may
2838 // have been redefined.
2839 bool trace_name_printed = false;
2840 adjust_default_methods(&trace_name_printed);
2841 if (verified_at_dump_time()) {
2842 // Initialize vtable and itable for classes which can be verified at dump time.
2843 // Unlinked classes such as old classes with major version < 50 cannot be verified
2844 // at dump time.
2845 vtable().initialize_vtable();
2846 itable().initialize_itable();
2847 }
2848 }
2849 #endif // INCLUDE_JVMTI
2850
2851 // restore constant pool resolved references
2852 constants()->restore_unshareable_info(CHECK);
2853
2854 if (array_klasses() != nullptr) {
2855 // To get a consistent list of classes we need MultiArray_lock to ensure
2856 // array classes aren't observed while they are being restored.
2857 RecursiveLocker rl(MultiArray_lock, THREAD);
2858 assert(this == array_klasses()->bottom_klass(), "sanity");
2859 // Array classes have null protection domain.
2860 // --> see ArrayKlass::complete_create_array_klass()
2861 array_klasses()->restore_unshareable_info(class_loader_data(), Handle(), CHECK);
2862 }
2863
2864 // Initialize @ValueBased class annotation if not already set in the archived klass.
2865 if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation() && !is_value_based()) {
2866 set_is_value_based();
2867 }
2868
2869 DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
2870 }
2871
2872 bool InstanceKlass::can_be_verified_at_dumptime() const {
2873 if (AOTMetaspace::in_aot_cache(this)) {
2874 // This is a class that was dumped into the base archive, so we know
2875 // it was verified at dump time.
2876 return true;
2877 }
2878
2879 if (CDSConfig::is_preserving_verification_constraints()) {
2880 return true;
2881 }
2882
2883 if (CDSConfig::is_old_class_for_verifier(this)) {
2884 // The old verifier does not save verification constraints, so at run time
2885 // SystemDictionaryShared::check_verification_constraints() will not work for this class.
2886 return false;
2887 }
2888 if (super() != nullptr && !super()->can_be_verified_at_dumptime()) {
2889 return false;
2890 }
2891 Array<InstanceKlass*>* interfaces = local_interfaces();
2892 int len = interfaces->length();
2893 for (int i = 0; i < len; i++) {
2894 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
2895 return false;
2896 }
2897 }
2898 return true;
2899 }
2900
2901 #endif // INCLUDE_CDS
2902
2903 #if INCLUDE_JVMTI
2904 static void clear_all_breakpoints(Method* m) {
2905 m->clear_all_breakpoints();
2906 }
2907 #endif
2908
2909 void InstanceKlass::unload_class(InstanceKlass* ik) {
2910
2911 if (ik->is_scratch_class()) {
2912 assert(ik->dependencies().is_empty(), "dependencies should be empty for scratch classes");
2913 return;
2914 }
2915 assert(ik->is_loaded(), "class should be loaded " PTR_FORMAT, p2i(ik));
2916
2917 // Release dependencies.
2918 ik->dependencies().remove_all_dependents();
2919
2920 // notify the debugger
2921 if (JvmtiExport::should_post_class_unload()) {
2922 JvmtiExport::post_class_unload(ik);
2923 }
2924
2925 // notify ClassLoadingService of class unload
2926 ClassLoadingService::notify_class_unloaded(ik);
2927
2928 SystemDictionaryShared::handle_class_unloading(ik);
2929
2930 if (log_is_enabled(Info, class, unload)) {
2931 ResourceMark rm;
2932 log_info(class, unload)("unloading class %s " PTR_FORMAT, ik->external_name(), p2i(ik));
2933 }
2934
2935 Events::log_class_unloading(Thread::current(), ik);
2936
2937 #if INCLUDE_JFR
2938 assert(ik != nullptr, "invariant");
2939 EventClassUnload event;
2940 event.set_unloadedClass(ik);
2941 event.set_definingClassLoader(ik->class_loader_data());
2942 event.commit();
2943 #endif
2944 }
2945
2946 static void method_release_C_heap_structures(Method* m) {
2947 m->release_C_heap_structures();
2948 }
2949
2950 // Called also by InstanceKlass::deallocate_contents, with false for release_sub_metadata.
2951 void InstanceKlass::release_C_heap_structures(bool release_sub_metadata) {
2952 // Clean up C heap
2953 Klass::release_C_heap_structures();
2954
2955 // Deallocate and call destructors for MDO mutexes
2956 if (release_sub_metadata) {
2957 methods_do(method_release_C_heap_structures);
2958 }
2959
2960 // Deallocate oop map cache
2961 if (_oop_map_cache != nullptr) {
2962 delete _oop_map_cache;
2963 _oop_map_cache = nullptr;
2964 }
2965
2966 // Deallocate JNI identifiers for jfieldIDs
2967 JNIid::deallocate(jni_ids());
2968 set_jni_ids(nullptr);
2969
2970 jmethodID* jmeths = _methods_jmethod_ids;
2971 if (jmeths != nullptr) {
2972 release_set_methods_jmethod_ids(nullptr);
2973 FreeHeap(jmeths);
2974 }
2975
2976 assert(_dep_context == nullptr,
2977 "dependencies should already be cleaned");
2978
2979 #if INCLUDE_JVMTI
2980 // Deallocate breakpoint records
2981 if (breakpoints() != nullptr) {
2982 methods_do(clear_all_breakpoints);
2983 assert(breakpoints() == nullptr, "should have cleared breakpoints");
2984 }
2985
2986 // deallocate the cached class file
2987 if (_cached_class_file != nullptr) {
2988 os::free(_cached_class_file);
2989 _cached_class_file = nullptr;
2990 }
2991 #endif
2992
2993 FREE_C_HEAP_ARRAY(char, _source_debug_extension);
2994
2995 if (release_sub_metadata) {
2996 constants()->release_C_heap_structures();
2997 }
2998 }
2999
3000 // The constant pool is on stack if any of the methods are executing or
3001 // referenced by handles.
3002 bool InstanceKlass::on_stack() const {
3003 return _constants->on_stack();
3004 }
3005
3006 Symbol* InstanceKlass::source_file_name() const { return _constants->source_file_name(); }
3007 u2 InstanceKlass::source_file_name_index() const { return _constants->source_file_name_index(); }
3008 void InstanceKlass::set_source_file_name_index(u2 sourcefile_index) { _constants->set_source_file_name_index(sourcefile_index); }
3009
3010 // minor and major version numbers of class file
3011 u2 InstanceKlass::minor_version() const { return _constants->minor_version(); }
3012 void InstanceKlass::set_minor_version(u2 minor_version) { _constants->set_minor_version(minor_version); }
3013 u2 InstanceKlass::major_version() const { return _constants->major_version(); }
3014 void InstanceKlass::set_major_version(u2 major_version) { _constants->set_major_version(major_version); }
3015
3016 const InstanceKlass* InstanceKlass::get_klass_version(int version) const {
3017 for (const InstanceKlass* ik = this; ik != nullptr; ik = ik->previous_versions()) {
3018 if (ik->constants()->version() == version) {
3019 return ik;
3020 }
3021 }
3022 return nullptr;
3023 }
3024
3025 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
3026 if (array == nullptr) {
3027 _source_debug_extension = nullptr;
3028 } else {
3029 // Adding one to the attribute length in order to store a null terminator
3030 // character could cause an overflow because the attribute length is
3031 // already coded with a u4 in the classfile, but in practice, it's
3032 // unlikely to happen.
3033 assert((length+1) > length, "Overflow checking");
3034 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
3035 for (int i = 0; i < length; i++) {
3036 sde[i] = array[i];
3037 }
3038 sde[length] = '\0';
3039 _source_debug_extension = sde;
3040 }
3041 }
3042
3043 Symbol* InstanceKlass::generic_signature() const { return _constants->generic_signature(); }
3044 u2 InstanceKlass::generic_signature_index() const { return _constants->generic_signature_index(); }
3045 void InstanceKlass::set_generic_signature_index(u2 sig_index) { _constants->set_generic_signature_index(sig_index); }
3046
3047 const char* InstanceKlass::signature_name() const {
3048
3049 // Get the internal name as a c string
3050 const char* src = (const char*) (name()->as_C_string());
3051 const int src_length = (int)strlen(src);
3052
3053 char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
3054
3055 // Add L as type indicator
3056 int dest_index = 0;
3057 dest[dest_index++] = JVM_SIGNATURE_CLASS;
3058
3059 // Add the actual class name
3060 for (int src_index = 0; src_index < src_length; ) {
3061 dest[dest_index++] = src[src_index++];
3062 }
3063
3064 if (is_hidden()) { // Replace the last '+' with a '.'.
3065 for (int index = (int)src_length; index > 0; index--) {
3066 if (dest[index] == '+') {
3067 dest[index] = JVM_SIGNATURE_DOT;
3068 break;
3069 }
3070 }
3071 }
3072
3073 // Add the semicolon and the null
3074 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
3075 dest[dest_index] = '\0';
3076 return dest;
3077 }
3078
3079 ModuleEntry* InstanceKlass::module() const {
3080 if (is_hidden() &&
3081 in_unnamed_package() &&
3082 class_loader_data()->has_class_mirror_holder()) {
3083 // For a non-strong hidden class defined to an unnamed package,
3084 // its (class held) CLD will not have an unnamed module created for it.
3085 // Two choices to find the correct ModuleEntry:
3086 // 1. If hidden class is within a nest, use nest host's module
3087 // 2. Find the unnamed module off from the class loader
3088 // For now option #2 is used since a nest host is not set until
3089 // after the instance class is created in jvm_lookup_define_class().
3090 if (class_loader_data()->is_boot_class_loader_data()) {
3091 return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
3092 } else {
3093 oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
3094 assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
3095 return java_lang_Module::module_entry(module);
3096 }
3097 }
3098
3099 // Class is in a named package
3100 if (!in_unnamed_package()) {
3101 return _package_entry->module();
3102 }
3103
3104 // Class is in an unnamed package, return its loader's unnamed module
3105 return class_loader_data()->unnamed_module();
3106 }
3107
3108 bool InstanceKlass::in_javabase_module() const {
3109 return module()->name() == vmSymbols::java_base();
3110 }
3111
3112 void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {
3113
3114 // Ensure that "java/" packages are only loaded by the boot or platform built-in loaders.
3115 // This check is not needed for shared classes since CDS does not archive prohibited classes.
3116 if (!in_aot_cache()) {
3117 check_prohibited_package(name(), loader_data, CHECK);
3118 }
3119
3120 if (in_aot_cache() && _package_entry != nullptr) {
3121 if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) {
3122 // we can use the saved package
3123 assert(AOTMetaspace::in_aot_cache(_package_entry), "must be");
3124 return;
3125 } else {
3126 _package_entry = nullptr;
3127 }
3128 }
3129
3130 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
3131 // it returns, so we need to decrement it when the current function exits.
3132 TempNewSymbol from_class_name =
3133 (pkg_entry != nullptr) ? nullptr : ClassLoader::package_from_class_name(name());
3134
3135 Symbol* pkg_name;
3136 if (pkg_entry != nullptr) {
3137 pkg_name = pkg_entry->name();
3138 } else {
3139 pkg_name = from_class_name;
3140 }
3141
3142 if (pkg_name != nullptr && loader_data != nullptr) {
3143
3144 // Find in class loader's package entry table.
3145 _package_entry = pkg_entry != nullptr ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
3146
3147 // If the package name is not found in the loader's package
3148 // entry table, it is an indication that the package has not
3149 // been defined. Consider it defined within the unnamed module.
3150 if (_package_entry == nullptr) {
3151
3152 if (!ModuleEntryTable::javabase_defined()) {
3153 // Before java.base is defined during bootstrapping, define all packages in
3154 // the java.base module. If a non-java.base package is erroneously placed
3155 // in the java.base module it will be caught later when java.base
3156 // is defined by ModuleEntryTable::verify_javabase_packages check.
3157 assert(ModuleEntryTable::javabase_moduleEntry() != nullptr, JAVA_BASE_NAME " module is null");
3158 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, ModuleEntryTable::javabase_moduleEntry());
3159 } else {
3160 assert(loader_data->unnamed_module() != nullptr, "unnamed module is null");
3161 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, loader_data->unnamed_module());
3162 }
3163
3164 // A package should have been successfully created
3165 DEBUG_ONLY(ResourceMark rm(THREAD));
3166 assert(_package_entry != nullptr, "Package entry for class %s not found, loader %s",
3167 name()->as_C_string(), loader_data->loader_name_and_id());
3168 }
3169
3170 if (log_is_enabled(Debug, module)) {
3171 ResourceMark rm(THREAD);
3172 ModuleEntry* m = _package_entry->module();
3173 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
3174 external_name(),
3175 pkg_name->as_C_string(),
3176 loader_data->loader_name_and_id(),
3177 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
3178 }
3179 } else {
3180 ResourceMark rm(THREAD);
3181 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
3182 external_name(),
3183 (loader_data != nullptr) ? loader_data->loader_name_and_id() : "null",
3184 UNNAMED_MODULE);
3185 }
3186 }
3187
3188 // Function set_classpath_index ensures that for a non-null _package_entry
3189 // of the InstanceKlass, the entry is in the boot loader's package entry table.
3190 // It then sets the classpath_index in the package entry record.
3191 //
3192 // The classpath_index field is used to find the entry on the boot loader class
3193 // path for packages with classes loaded by the boot loader from -Xbootclasspath/a
3194 // in an unnamed module. It is also used to indicate (for all packages whose
3195 // classes are loaded by the boot loader) that at least one of the package's
3196 // classes has been loaded.
3197 void InstanceKlass::set_classpath_index(s2 path_index) {
3198 if (_package_entry != nullptr) {
3199 DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
3200 assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
3201 assert(path_index != -1, "Unexpected classpath_index");
3202 _package_entry->set_classpath_index(path_index);
3203 }
3204 }
3205
3206 // different versions of is_same_class_package
3207
3208 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
3209 oop classloader1 = this->class_loader();
3210 PackageEntry* classpkg1 = this->package();
3211 if (class2->is_objArray_klass()) {
3212 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
3213 }
3214
3215 oop classloader2;
3216 PackageEntry* classpkg2;
3217 if (class2->is_instance_klass()) {
3218 classloader2 = class2->class_loader();
3219 classpkg2 = class2->package();
3220 } else {
3221 assert(class2->is_typeArray_klass(), "should be type array");
3222 classloader2 = nullptr;
3223 classpkg2 = nullptr;
3224 }
3225
3226 // Same package is determined by comparing class loader
3227 // and package entries. Both must be the same. This rule
3228 // applies even to classes that are defined in the unnamed
3229 // package; they still must have the same class loader.
3230 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
3231 return true;
3232 }
3233
3234 return false;
3235 }
3236
3237 // Return true if this class and other_class are in the same package. Class loader
3238 // and class name information is enough to determine a class's package.
3239 bool InstanceKlass::is_same_class_package(oop other_class_loader,
3240 const Symbol* other_class_name) const {
3241 if (class_loader() != other_class_loader) {
3242 return false;
3243 }
3244 if (name()->fast_compare(other_class_name) == 0) {
3245 return true;
3246 }
3247
3248 {
3249 ResourceMark rm;
3250
3251 bool bad_class_name = false;
3252 TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
3253 if (bad_class_name) {
3254 return false;
3255 }
3256 // Check that package_from_class_name() returns null, not "", if there is no package.
3257 assert(other_pkg == nullptr || other_pkg->utf8_length() > 0, "package name is empty string");
3258
3259 const Symbol* const this_package_name =
3260 this->package() != nullptr ? this->package()->name() : nullptr;
3261
3262 if (this_package_name == nullptr || other_pkg == nullptr) {
3263 // One of the two doesn't have a package. Only return true if the other
3264 // one also doesn't have a package.
3265 return this_package_name == other_pkg;
3266 }
3267
3268 // Check if package is identical
3269 return this_package_name->fast_compare(other_pkg) == 0;
3270 }
3271 }
3272
3273 static bool is_prohibited_package_slow(Symbol* class_name) {
3274 // Caller has ResourceMark
3275 int length;
3276 jchar* unicode = class_name->as_unicode(length);
3277 return (length >= 5 &&
3278 unicode[0] == 'j' &&
3279 unicode[1] == 'a' &&
3280 unicode[2] == 'v' &&
3281 unicode[3] == 'a' &&
3282 unicode[4] == '/');
3283 }
3284
3285 // Only boot and platform class loaders can define classes in "java/" packages.
3286 void InstanceKlass::check_prohibited_package(Symbol* class_name,
3287 ClassLoaderData* loader_data,
3288 TRAPS) {
3289 if (!loader_data->is_boot_class_loader_data() &&
3290 !loader_data->is_platform_class_loader_data() &&
3291 class_name != nullptr && class_name->utf8_length() >= 5) {
3292 ResourceMark rm(THREAD);
3293 bool prohibited;
3294 const u1* base = class_name->base();
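// If any of the first five bytes has its high bit set, the name starts with
// non-ASCII UTF-8 and the simple byte comparison below cannot be used, so
// fall back to the slower unicode comparison.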
3295 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
3296 prohibited = is_prohibited_package_slow(class_name);
3297 } else {
3298 char* name = class_name->as_C_string();
3299 prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
3300 }
3301 if (prohibited) {
3302 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
3303 assert(pkg_name != nullptr, "Error in parsing package name starting with 'java/'");
3304 char* name = pkg_name->as_C_string();
3305 const char* class_loader_name = loader_data->loader_name_and_id();
3306 StringUtils::replace_no_expand(name, "/", ".");
3307 const char* msg_text1 = "Class loader (instance of): ";
3308 const char* msg_text2 = " tried to load prohibited package name: ";
3309 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
3310 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
3311 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
3312 THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
3313 }
3314 }
3315 return;
3316 }
3317
3318 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
3319 constantPoolHandle i_cp(THREAD, constants());
3320 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
3321 int ioff = iter.inner_class_info_index();
3322 if (ioff != 0) {
3323 // Check to see if the name matches the class we're looking for
3324 // before attempting to find the class.
3325 if (i_cp->klass_name_at_matches(this, ioff)) {
3326 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
3327 if (this == inner_klass) {
3328 *ooff = iter.outer_class_info_index();
3329 *noff = iter.inner_name_index();
3330 return true;
3331 }
3332 }
3333 }
3334 }
3335 return false;
3336 }
3337
3338 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
3339 InstanceKlass* outer_klass = nullptr;
3340 *inner_is_member = false;
3341 int ooff = 0, noff = 0;
3342 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
3343 if (has_inner_classes_attr) {
3344 constantPoolHandle i_cp(THREAD, constants());
3345 if (ooff != 0) {
3346 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
3347 if (!ok->is_instance_klass()) {
3348 // If the outer class is not an instance klass then it cannot have
3349 // declared any inner classes.
3350 ResourceMark rm(THREAD);
3351 // Names are all known to be < 64k so we know this formatted message is not excessively large.
3352 Exceptions::fthrow(
3353 THREAD_AND_LOCATION,
3354 vmSymbols::java_lang_IncompatibleClassChangeError(),
3355 "%s and %s disagree on InnerClasses attribute",
3356 ok->external_name(),
3357 external_name());
3358 return nullptr;
3359 }
3360 outer_klass = InstanceKlass::cast(ok);
3361 *inner_is_member = true;
3362 }
3363 if (nullptr == outer_klass) {
3364 // It may be a local class; try for that.
3365 int encl_method_class_idx = enclosing_method_class_index();
3366 if (encl_method_class_idx != 0) {
3367 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
3368 outer_klass = InstanceKlass::cast(ok);
3369 *inner_is_member = false;
3370 }
3371 }
3372 }
3373
3374 // If no inner class attribute found for this class.
3375 if (nullptr == outer_klass) return nullptr;
3376
3377 // Throws an exception if outer klass has not declared k as an inner klass
3378 // We need evidence that each klass knows about the other, or else
3379 // the system could allow a spoof of an inner class to gain access rights.
3380 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
3381 return outer_klass;
3382 }
3383
3384 u2 InstanceKlass::compute_modifier_flags() const {
3385 u2 access = access_flags().as_unsigned_short();
3386
3387 // But check if it happens to be member class.
3388 InnerClassesIterator iter(this);
3389 for (; !iter.done(); iter.next()) {
3390 int ioff = iter.inner_class_info_index();
3391 // Inner class attribute can be zero, skip it.
3392 // Strange but true: JVM spec. allows null inner class refs.
3393 if (ioff == 0) continue;
3394
3395 // Only look at classes that are already loaded,
3396 // since we are looking for the flags of this class itself.
3397 Symbol* inner_name = constants()->klass_name_at(ioff);
3398 if (name() == inner_name) {
3399 // This is really a member class.
3400 access = iter.inner_access_flags();
3401 break;
3402 }
3403 }
3404 // Remember to strip ACC_SUPER bit
3405 return (access & (~JVM_ACC_SUPER));
3406 }
3407
3408 jint InstanceKlass::jvmti_class_status() const {
3409 jint result = 0;
3410
3411 if (is_linked()) {
3412 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3413 }
3414
3415 if (is_initialized()) {
3416 assert(is_linked(), "Class status is not consistent");
3417 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3418 }
3419 if (is_in_error_state()) {
3420 result |= JVMTI_CLASS_STATUS_ERROR;
3421 }
3422 return result;
3423 }
3424
3425 Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
3426 bool implements_interface; // initialized by method_at_itable_or_null
3427 Method* m = method_at_itable_or_null(holder, index,
3428 implements_interface); // out parameter
3429 if (m != nullptr) {
3430 assert(implements_interface, "sanity");
3431 return m;
3432 } else if (implements_interface) {
3433 // Throw AbstractMethodError since corresponding itable slot is empty.
3434 THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
3435 } else {
3436 // If the interface isn't implemented by the receiver class,
3437 // the VM should throw IncompatibleClassChangeError.
3438 ResourceMark rm(THREAD);
3439 stringStream ss;
3440 bool same_module = (module() == holder->module());
3441 ss.print("Receiver class %s does not implement "
3442 "the interface %s defining the method to be called "
3443 "(%s%s%s)",
3444 external_name(), holder->external_name(),
3445 (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
3446 (same_module) ? "" : "; ",
3447 (same_module) ? "" : holder->class_in_module_of_loader());
3448 THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
3449 }
3450 }
3451
3452 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3453 klassItable itable(this);
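// Walk the offset table of this class's itable looking for the entry that
// corresponds to 'holder'; if found, index into that interface's method block.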
3454 for (int i = 0; i < itable.size_offset_table(); i++) {
3455 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3456 if (offset_entry->interface_klass() == holder) {
3457 implements_interface = true;
3458 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3459 Method* m = ime[index].method();
3460 return m;
3461 }
3462 }
3463 implements_interface = false;
3464 return nullptr; // offset entry not found
3465 }
3466
3467 int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
3468 assert(is_linked(), "required");
3469 assert(intf_method->method_holder()->is_interface(), "not an interface method");
3470 assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");
3471
3472 int vtable_index = Method::invalid_vtable_index;
3473 Symbol* name = intf_method->name();
3474 Symbol* signature = intf_method->signature();
3475
3476 // First check in default method array
3477 if (!intf_method->is_abstract() && default_methods() != nullptr) {
3478 int index = find_method_index(default_methods(),
3479 name, signature,
3480 Klass::OverpassLookupMode::find,
3481 Klass::StaticLookupMode::find,
3482 Klass::PrivateLookupMode::find);
3483 if (index >= 0) {
3484 vtable_index = default_vtable_indices()->at(index);
3485 }
3486 }
3487 if (vtable_index == Method::invalid_vtable_index) {
3488 // get vtable_index for miranda methods
3489 klassVtable vt = vtable();
3490 vtable_index = vt.index_of_miranda(name, signature);
3491 }
3492 return vtable_index;
3493 }
3494
3495 #if INCLUDE_JVMTI
3496 // Update default_methods for RedefineClasses for methods that are
3497 // not yet in the vtable due to concurrent subclass definition and superinterface
3498 // redefinition.
3499 // Note: methods already in the vtable should have been updated via adjust_method_entries.
3500 void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
3501 // search the default_methods for uses of either obsolete or EMCP methods
3502 if (default_methods() != nullptr) {
3503 for (int index = 0; index < default_methods()->length(); index ++) {
3504 Method* old_method = default_methods()->at(index);
3505 if (old_method == nullptr || !old_method->is_old()) {
3506 continue; // skip uninteresting entries
3507 }
3508 assert(!old_method->is_deleted(), "default methods may not be deleted");
3509 Method* new_method = old_method->get_new_method();
3510 default_methods()->at_put(index, new_method);
3511
3512 if (log_is_enabled(Info, redefine, class, update)) {
3513 ResourceMark rm;
3514 if (!(*trace_name_printed)) {
3515 log_info(redefine, class, update)
3516 ("adjust: klassname=%s default methods from name=%s",
3517 external_name(), old_method->method_holder()->external_name());
3518 *trace_name_printed = true;
3519 }
3520 log_debug(redefine, class, update, vtables)
3521 ("default method update: %s(%s) ",
3522 new_method->name()->as_C_string(), new_method->signature()->as_C_string());
3523 }
3524 }
3525 }
3526 }
3527 #endif // INCLUDE_JVMTI
3528
3529 // On-stack replacement stuff
3530 void InstanceKlass::add_osr_nmethod(nmethod* n) {
3531 assert_lock_strong(NMethodState_lock);
3532 #ifndef PRODUCT
3533 nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3534 assert(prev == nullptr || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
3535 "redundant OSR recompilation detected. memory leak in CodeCache!");
3536 #endif
3537 // only one compilation can be active
3538 assert(n->is_osr_method(), "wrong kind of nmethod");
3539 n->set_osr_link(osr_nmethods_head());
3540 set_osr_nmethods_head(n);
3541 // Raise the highest osr level if necessary
3542 n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));
3543
3544 // Get rid of the osr methods for the same bci that have lower levels.
3545 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3546 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3547 if (inv != nullptr && inv->is_in_use()) {
3548 inv->make_not_entrant(nmethod::InvalidationReason::OSR_INVALIDATION_OF_LOWER_LEVEL);
3549 }
3550 }
3551 }
3552
3553 // Remove osr nmethod from the list. Return true if found and removed.
3554 bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3555 // This is a short non-blocking critical region, so the no safepoint check is ok.
3556 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3557 assert(n->is_osr_method(), "wrong kind of nmethod");
3558 nmethod* last = nullptr;
3559 nmethod* cur = osr_nmethods_head();
3560 int max_level = CompLevel_none; // Find the max comp level excluding n
3561 Method* m = n->method();
3562 // Search for match
3563 bool found = false;
3564 while (cur != nullptr && cur != n) {
3565 if (m == cur->method()) {
3566 // Find max level before n
3567 max_level = MAX2(max_level, cur->comp_level());
3568 }
3569 last = cur;
3570 cur = cur->osr_link();
3571 }
3572 nmethod* next = nullptr;
3573 if (cur == n) {
3574 found = true;
3575 next = cur->osr_link();
3576 if (last == nullptr) {
3577 // Remove first element
3578 set_osr_nmethods_head(next);
3579 } else {
3580 last->set_osr_link(next);
3581 }
3582 }
3583 n->set_osr_link(nullptr);
3584 cur = next;
3585 while (cur != nullptr) {
3586 // Find max level after n
3587 if (m == cur->method()) {
3588 max_level = MAX2(max_level, cur->comp_level());
3589 }
3590 cur = cur->osr_link();
3591 }
3592 m->set_highest_osr_comp_level(max_level);
3593 return found;
3594 }
3595
3596 int InstanceKlass::mark_osr_nmethods(DeoptimizationScope* deopt_scope, const Method* m) {
3597 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3598 nmethod* osr = osr_nmethods_head();
3599 int found = 0;
3600 while (osr != nullptr) {
3601 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3602 if (osr->method() == m) {
3603 deopt_scope->mark(osr);
3604 found++;
3605 }
3606 osr = osr->osr_link();
3607 }
3608 return found;
3609 }
3610
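// Find an OSR nmethod for 'm' at 'bci' (any bci if bci == InvocationEntryBci).
// With match_level set, only an exact comp_level match is returned; otherwise
// the highest-level nmethod found is returned if it is at least 'comp_level'.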
3611 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3612 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3613 nmethod* osr = osr_nmethods_head();
3614 nmethod* best = nullptr;
3615 while (osr != nullptr) {
3616 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3617 // There can be a time when a C1 OSR method exists but we are waiting
3618 // for a C2 version. When C2 completes its OSR nmethod we will trash
3619 // the C1 version and only be able to find the C2 version. However,
3620 // while we overflow in the C1 code at back branches, we don't want to
3621 // try to switch to the same code that we are already running.
3622
3623 if (osr->method() == m &&
3624 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3625 if (match_level) {
3626 if (osr->comp_level() == comp_level) {
3627 // Found a match - return it.
3628 return osr;
3629 }
3630 } else {
3631 if (best == nullptr || (osr->comp_level() > best->comp_level())) {
3632 if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
3633 // Found the best possible - return it.
3634 return osr;
3635 }
3636 best = osr;
3637 }
3638 }
3639 }
3640 osr = osr->osr_link();
3641 }
3642
3643 assert(match_level == false || best == nullptr, "shouldn't pick up anything if match_level is set");
3644 if (best != nullptr && best->comp_level() >= comp_level) {
3645 return best;
3646 }
3647 return nullptr;
3648 }
3649
3650 // -----------------------------------------------------------------------------------------------------
3651 // Printing
3652
3653 #define BULLET " - "
3654
3655 static const char* state_names[] = {
3656 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3657 };
3658
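// Print each vtable/itable slot as an index and raw value; if the value is a
// valid Metadata pointer, also print its value.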
3659 static void print_vtable(intptr_t* start, int len, outputStream* st) {
3660 for (int i = 0; i < len; i++) {
3661 intptr_t e = start[i];
3662 st->print("%d : " INTPTR_FORMAT, i, e);
3663 if (MetaspaceObj::is_valid((Metadata*)e)) {
3664 st->print(" ");
3665 ((Metadata*)e)->print_value_on(st);
3666 }
3667 st->cr();
3668 }
3669 }
3670
3671 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3672 return print_vtable(reinterpret_cast<intptr_t*>(start), len, st);
3673 }
3674
3675 const char* InstanceKlass::init_state_name() const {
3676 return state_names[init_state()];
3677 }
3678
3679 void InstanceKlass::print_on(outputStream* st) const {
3680 assert(is_klass(), "must be klass");
3681 Klass::print_on(st);
3682
3683 st->print(BULLET"instance size: %d", size_helper()); st->cr();
3684 st->print(BULLET"klass size: %d", size()); st->cr();
3685 st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
3686 st->print(BULLET"flags: "); _misc_flags.print_on(st); st->cr();
3687 st->print(BULLET"state: "); st->print_cr("%s", init_state_name());
3688 st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
3689 st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3690 st->print(BULLET"sub: ");
3691 Klass* sub = subklass();
3692 int n;
3693 for (n = 0; sub != nullptr; n++, sub = sub->next_sibling()) {
3694 if (n < MaxSubklassPrintSize) {
3695 sub->print_value_on(st);
3696 st->print(" ");
3697 }
3698 }
3699 if (n >= MaxSubklassPrintSize) st->print("(%zd more klasses...)", n - MaxSubklassPrintSize);
3700 st->cr();
3701
3702 if (is_interface()) {
3703 st->print_cr(BULLET"nof implementors: %d", nof_implementors());
3704 if (nof_implementors() == 1) {
3705 st->print_cr(BULLET"implementor: ");
3706 st->print(" ");
3707 implementor()->print_value_on(st);
3708 st->cr();
3709 }
3710 }
3711
3712 st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3713 st->print(BULLET"methods: "); methods()->print_value_on(st); st->cr();
3714 if (Verbose || WizardMode) {
3715 Array<Method*>* method_array = methods();
3716 for (int i = 0; i < method_array->length(); i++) {
3717 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3718 }
3719 }
3720 st->print(BULLET"method ordering: "); method_ordering()->print_value_on(st); st->cr();
3721 if (default_methods() != nullptr) {
3722 st->print(BULLET"default_methods: "); default_methods()->print_value_on(st); st->cr();
3723 if (Verbose) {
3724 Array<Method*>* method_array = default_methods();
3725 for (int i = 0; i < method_array->length(); i++) {
3726 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3727 }
3728 }
3729 }
3730 print_on_maybe_null(st, BULLET"default vtable indices: ", default_vtable_indices());
3731 st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
3732 st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3733
3734 st->print(BULLET"secondary supers: "); secondary_supers()->print_value_on(st); st->cr();
3735
3736 st->print(BULLET"hash_slot: %d", hash_slot()); st->cr();
3737 st->print(BULLET"secondary bitmap: " UINTX_FORMAT_X_0, _secondary_supers_bitmap); st->cr();
3738
3739 if (secondary_supers() != nullptr) {
3740 if (Verbose) {
3741 bool is_hashed = (_secondary_supers_bitmap != SECONDARY_SUPERS_BITMAP_FULL);
3742 st->print_cr(BULLET"---- secondary supers (%d words):", _secondary_supers->length());
3743 for (int i = 0; i < _secondary_supers->length(); i++) {
3744 ResourceMark rm; // for external_name()
3745 Klass* secondary_super = _secondary_supers->at(i);
3746 st->print(BULLET"%2d:", i);
3747 if (is_hashed) {
3748 int home_slot = compute_home_slot(secondary_super, _secondary_supers_bitmap);
3749 int distance = (i - home_slot) & SECONDARY_SUPERS_TABLE_MASK;
3750 st->print(" dist:%02d:", distance);
3751 }
3752 st->print_cr(" %p %s", secondary_super, secondary_super->external_name());
3753 }
3754 }
3755 }
3756 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
3757
3758 print_on_maybe_null(st, BULLET"class loader data: ", class_loader_data());
3759 print_on_maybe_null(st, BULLET"source file: ", source_file_name());
3760 if (source_debug_extension() != nullptr) {
3761 st->print(BULLET"source debug extension: ");
3762 st->print("%s", source_debug_extension());
3763 st->cr();
3764 }
3765 print_on_maybe_null(st, BULLET"class annotations: ", class_annotations());
3766 print_on_maybe_null(st, BULLET"class type annotations: ", class_type_annotations());
3767 print_on_maybe_null(st, BULLET"field annotations: ", fields_annotations());
3768 print_on_maybe_null(st, BULLET"field type annotations: ", fields_type_annotations());
3769 {
3770 bool have_pv = false;
3771 // previous versions are linked together through the InstanceKlass
3772 for (InstanceKlass* pv_node = previous_versions();
3773 pv_node != nullptr;
3774 pv_node = pv_node->previous_versions()) {
3775 if (!have_pv)
3776 st->print(BULLET"previous version: ");
3777 have_pv = true;
3778 pv_node->constants()->print_value_on(st);
3779 }
3780 if (have_pv) st->cr();
3781 }
3782
3783 print_on_maybe_null(st, BULLET"generic signature: ", generic_signature());
3784 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
3785 st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
3786 print_on_maybe_null(st, BULLET"record components: ", record_components());
3787 st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
3788 if (java_mirror() != nullptr) {
3789 st->print(BULLET"java mirror: ");
3790 java_mirror()->print_value_on(st);
3791 st->cr();
3792 } else {
3793 st->print_cr(BULLET"java mirror: null");
3794 }
3795 st->print(BULLET"vtable length %d (start addr: " PTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
3796 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
3797 st->print(BULLET"itable length %d (start addr: " PTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
3798 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st);
3799 st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
3800
3801 FieldPrinter print_static_field(st);
3802 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
3803 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
3804 FieldPrinter print_nonstatic_field(st);
3805 InstanceKlass* ik = const_cast<InstanceKlass*>(this);
3806 ik->print_nonstatic_fields(&print_nonstatic_field);
3807
3808 st->print(BULLET"non-static oop maps (%d entries): ", nonstatic_oop_map_count());
3809 OopMapBlock* map = start_of_nonstatic_oop_maps();
3810 OopMapBlock* end_map = map + nonstatic_oop_map_count();
3811 while (map < end_map) {
3812 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
3813 map++;
3814 }
3815 st->cr();
3816
3817 if (fieldinfo_search_table() != nullptr) {
3818 st->print_cr(BULLET"---- field info search table:");
3819 FieldInfoStream::print_search_table(st, _constants, _fieldinfo_stream, _fieldinfo_search_table);
3820 }
3821 }
3822
3823 void InstanceKlass::print_value_on(outputStream* st) const {
3824 assert(is_klass(), "must be klass");
3825 if (Verbose || WizardMode) access_flags().print_on(st);
3826 name()->print_value_on(st);
3827 }
3828
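// Print the field descriptor; when an object was supplied, also print the
// field's current value in that object.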
3829 void FieldPrinter::do_field(fieldDescriptor* fd) {
3830 _st->print(BULLET);
3831 if (_obj == nullptr) {
3832 fd->print_on(_st);
3833 _st->cr();
3834 } else {
3835 fd->print_on_for(_st, _obj);
3836 _st->cr();
3837 }
3838 }
3839
3840
3841 void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
3842 Klass::oop_print_on(obj, st);
3843
3844 if (this == vmClasses::String_klass()) {
3845 typeArrayOop value = java_lang_String::value(obj);
3846 juint length = java_lang_String::length(obj);
3847 if (value != nullptr &&
3848 value->is_typeArray() &&
3849 length <= (juint) value->length()) {
3850 st->print(BULLET"string: ");
3851 java_lang_String::print(obj, st);
3852 st->cr();
3853 }
3854 }
3855
3856 st->print_cr(BULLET"---- fields (total size %zu words):", oop_size(obj));
3857 FieldPrinter print_field(st, obj);
3858 print_nonstatic_fields(&print_field);
3859
3860 if (this == vmClasses::Class_klass()) {
3861 st->print(BULLET"signature: ");
3862 java_lang_Class::print_signature(obj, st);
3863 st->cr();
3864 Klass* real_klass = java_lang_Class::as_Klass(obj);
3865 if (real_klass != nullptr && real_klass->is_instance_klass()) {
3866 st->print_cr(BULLET"---- static fields (%d):", java_lang_Class::static_oop_field_count(obj));
3867 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
3868 }
3869 } else if (this == vmClasses::MethodType_klass()) {
3870 st->print(BULLET"signature: ");
3871 java_lang_invoke_MethodType::print_signature(obj, st);
3872 st->cr();
3873 }
3874 }
3875
3876 #ifndef PRODUCT
3877
3878 bool InstanceKlass::verify_itable_index(int i) {
3879 int method_count = klassItable::method_count_for_interface(this);
3880 assert(i >= 0 && i < method_count, "index out of bounds");
3881 return true;
3882 }
3883
3884 #endif //PRODUCT
3885
3886 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
3887 st->print("a ");
3888 name()->print_value_on(st);
3889 obj->print_address_on(st);
3890 if (this == vmClasses::String_klass()
3891 && java_lang_String::value(obj) != nullptr) {
3892 ResourceMark rm;
3893 int len = java_lang_String::length(obj);
3894 int plen = (len < 24 ? len : 12);
3895 char* str = java_lang_String::as_utf8_string(obj, 0, plen);
3896 st->print(" = \"%s\"", str);
3897 if (len > plen)
3898 st->print("...[%d]", len);
3899 } else if (this == vmClasses::Class_klass()) {
3900 Klass* k = java_lang_Class::as_Klass(obj);
3901 st->print(" = ");
3902 if (k != nullptr) {
3903 k->print_value_on(st);
3904 } else {
3905 const char* tname = type2name(java_lang_Class::primitive_type(obj));
3906 st->print("%s", tname ? tname : "type?");
3907 }
3908 } else if (this == vmClasses::MethodType_klass()) {
3909 st->print(" = ");
3910 java_lang_invoke_MethodType::print_signature(obj, st);
3911 } else if (java_lang_boxing_object::is_instance(obj)) {
3912 st->print(" = ");
3913 java_lang_boxing_object::print(obj, st);
3914 } else if (this == vmClasses::LambdaForm_klass()) {
3915 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
3916 if (vmentry != nullptr) {
3917 st->print(" => ");
3918 vmentry->print_value_on(st);
3919 }
3920 } else if (this == vmClasses::MemberName_klass()) {
3921 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
3922 if (vmtarget != nullptr) {
3923 st->print(" = ");
3924 vmtarget->print_value_on(st);
3925 } else {
3926 oop clazz = java_lang_invoke_MemberName::clazz(obj);
3927 oop name = java_lang_invoke_MemberName::name(obj);
3928 if (clazz != nullptr) {
3929 clazz->print_value_on(st);
3930 } else {
3931 st->print("null");
3932 }
3933 st->print(".");
3934 if (name != nullptr) {
3935 name->print_value_on(st);
3936 } else {
3937 st->print("null");
3938 }
3939 }
3940 }
3941 }
3942
3943 const char* InstanceKlass::internal_name() const {
3944 return external_name();
3945 }
3946
3947 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
3948 const ModuleEntry* module_entry,
3949 const ClassFileStream* cfs) const {
3950
3951 if (ClassListWriter::is_enabled()) {
3952 ClassListWriter::write(this, cfs);
3953 }
3954
3955 print_class_load_helper(loader_data, module_entry, cfs);
3956 print_class_load_cause_logging();
3957 }
3958
3959 void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data,
3960 const ModuleEntry* module_entry,
3961 const ClassFileStream* cfs) const {
3962
3963 if (!log_is_enabled(Info, class, load)) {
3964 return;
3965 }
3966
3967 ResourceMark rm;
3968 LogMessage(class, load) msg;
3969 stringStream info_stream;
3970
3971 // Name and class hierarchy info
3972 info_stream.print("%s", external_name());
3973
3974 // Source
3975 if (cfs != nullptr) {
3976 if (cfs->source() != nullptr) {
3977 const char* module_name = (module_entry->name() == nullptr) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
3978 if (module_name != nullptr) {
3979 // When the boot loader created the stream, it didn't know the module name
3980 // yet. Let's format it now.
3981 if (cfs->from_boot_loader_modules_image()) {
3982 info_stream.print(" source: jrt:/%s", module_name);
3983 } else {
3984 info_stream.print(" source: %s", cfs->source());
3985 }
3986 } else {
3987 info_stream.print(" source: %s", cfs->source());
3988 }
3989 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
3990 Thread* current = Thread::current();
3991 Klass* caller = current->is_Java_thread() ?
3992 JavaThread::cast(current)->security_get_caller_class(1):
3993 nullptr;
3994 // caller can be null, for example, during a JVMTI VM_Init hook
3995 if (caller != nullptr) {
3996 info_stream.print(" source: instance of %s", caller->external_name());
3997 } else {
3998 // source is unknown
3999 }
4000 } else {
4001 oop class_loader = loader_data->class_loader();
4002 info_stream.print(" source: %s", class_loader->klass()->external_name());
4003 }
4004 } else {
4005 assert(this->in_aot_cache(), "must be");
4006 if (AOTMetaspace::in_aot_cache_dynamic_region((void*)this)) {
4007 info_stream.print(" source: shared objects file (top)");
4008 } else {
4009 info_stream.print(" source: shared objects file");
4010 }
4011 }
4012
4013 msg.info("%s", info_stream.as_string());
4014
4015 if (log_is_enabled(Debug, class, load)) {
4016 stringStream debug_stream;
4017
4018 // Class hierarchy info
4019 debug_stream.print(" klass: " PTR_FORMAT " super: " PTR_FORMAT,
4020 p2i(this), p2i(super()));
4021
4022 // Interfaces
4023 if (local_interfaces() != nullptr && local_interfaces()->length() > 0) {
4024 debug_stream.print(" interfaces:");
4025 int length = local_interfaces()->length();
4026 for (int i = 0; i < length; i++) {
4027 debug_stream.print(" " PTR_FORMAT,
4028 p2i(local_interfaces()->at(i)));
4029 }
4030 }
4031
4032 // Class loader
4033 debug_stream.print(" loader: [");
4034 loader_data->print_value_on(&debug_stream);
4035 debug_stream.print("]");
4036
4037 // Classfile checksum
4038 if (cfs) {
4039 debug_stream.print(" bytes: %d checksum: %08x",
4040 cfs->length(),
4041 ClassLoader::crc32(0, (const char*)cfs->buffer(),
4042 cfs->length()));
4043 }
4044
4045 msg.debug("%s", debug_stream.as_string());
4046 }
4047 }
4048
4049 void InstanceKlass::print_class_load_cause_logging() const {
4050 bool log_cause_native = log_is_enabled(Info, class, load, cause, native);
4051 if (log_cause_native || log_is_enabled(Info, class, load, cause)) {
4052 JavaThread* current = JavaThread::current();
4053 ResourceMark rm(current);
4054 const char* name = external_name();
4055
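// Only log when the class name matches the LogClassLoadingCauseFor filter;
// "*" matches every class.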
4056 if (LogClassLoadingCauseFor == nullptr ||
4057 (strcmp("*", LogClassLoadingCauseFor) != 0 &&
4058 strstr(name, LogClassLoadingCauseFor) == nullptr)) {
4059 return;
4060 }
4061
4062 // Log Java stack first
4063 {
4064 LogMessage(class, load, cause) msg;
4065 NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
4066
4067 info_stream.print_cr("Java stack when loading %s:", name);
4068 current->print_stack_on(&info_stream);
4069 }
4070
4071 // Log native stack second
4072 if (log_cause_native) {
4073 // Log to string first so that lines can be indented
4074 stringStream stack_stream;
4075 char buf[O_BUFLEN];
4076 address lastpc = nullptr;
4077 NativeStackPrinter nsp(current);
4078 nsp.print_stack(&stack_stream, buf, sizeof(buf), lastpc,
4079 true /* print_source_info */, -1 /* max stack */);
4080
4081 LogMessage(class, load, cause, native) msg;
4082 NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
4083 info_stream.print_cr("Native stack when loading %s:", name);
4084
4085 // Print each native stack line to the log
4086 int size = (int) stack_stream.size();
4087 char* stack = stack_stream.as_string();
4088 char* stack_end = stack + size;
4089 char* line_start = stack;
4090 for (char* p = stack; p < stack_end; p++) {
4091 if (*p == '\n') {
4092 *p = '\0';
4093 info_stream.print_cr("\t%s", line_start);
4094 line_start = p + 1;
4095 }
4096 }
4097 if (line_start < stack_end) {
4098 info_stream.print_cr("\t%s", line_start);
4099 }
4100 }
4101 }
4102 }
4103
4104 // Verification
4105
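// Closure that visits each oop field of an object and verifies that the
// referenced value is either null or a valid oop.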
4106 class VerifyFieldClosure: public BasicOopIterateClosure {
4107 protected:
4108 template <class T> void do_oop_work(T* p) {
4109 oop obj = RawAccess<>::oop_load(p);
4110 if (!oopDesc::is_oop_or_null(obj)) {
4111 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
4112 Universe::print_on(tty);
4113 guarantee(false, "boom");
4114 }
4115 }
4116 public:
4117 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
4118 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
4119 };
4120
4121 void InstanceKlass::verify_on(outputStream* st) {
4122 #ifndef PRODUCT
4123 // Avoid redundant verifies; this really should be in product.
4124 if (_verify_count == Universe::verify_count()) return;
4125 _verify_count = Universe::verify_count();
4126 #endif
4127
4128 // Verify Klass
4129 Klass::verify_on(st);
4130
4131 // Verify that klass is present in ClassLoaderData
4132 guarantee(class_loader_data()->contains_klass(this),
4133 "this class isn't found in class loader data");
4134
4135 // Verify vtables
4136 if (is_linked()) {
4137 // $$$ This used to be done only for m/s collections. Doing it
4138 // always seemed a valid generalization. (DLD -- 6/00)
4139 vtable().verify(st);
4140 }
4141
4142 // Verify first subklass
4143 if (subklass() != nullptr) {
4144 guarantee(subklass()->is_klass(), "should be klass");
4145 }
4146
4147 // Verify siblings
4148 Klass* super = this->super();
4149 Klass* sib = next_sibling();
4150 if (sib != nullptr) {
4151 if (sib == this) {
4152 fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
4153 }
4154
4155 guarantee(sib->is_klass(), "should be klass");
4156 guarantee(sib->super() == super, "siblings should have same superklass");
4157 }
4158
4159 // Verify local interfaces
4160 if (local_interfaces()) {
4161 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
4162 for (int j = 0; j < local_interfaces->length(); j++) {
4163 InstanceKlass* e = local_interfaces->at(j);
4164 guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
4165 }
4166 }
4167
4168 // Verify transitive interfaces
4169 if (transitive_interfaces() != nullptr) {
4170 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
4171 for (int j = 0; j < transitive_interfaces->length(); j++) {
4172 InstanceKlass* e = transitive_interfaces->at(j);
4173 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
4174 }
4175 }
4176
4177 // Verify methods
4178 if (methods() != nullptr) {
4179 Array<Method*>* methods = this->methods();
4180 for (int j = 0; j < methods->length(); j++) {
4181 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4182 }
4183 for (int j = 0; j < methods->length() - 1; j++) {
4184 Method* m1 = methods->at(j);
4185 Method* m2 = methods->at(j + 1);
4186 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4187 }
4188 }
4189
4190 // Verify method ordering
4191 if (method_ordering() != nullptr) {
4192 Array<int>* method_ordering = this->method_ordering();
4193 int length = method_ordering->length();
4194 if (JvmtiExport::can_maintain_original_method_order() ||
4195 ((CDSConfig::is_using_archive() || CDSConfig::is_dumping_archive()) && length != 0)) {
4196 guarantee(length == methods()->length(), "invalid method ordering length");
4197 jlong sum = 0;
4198 for (int j = 0; j < length; j++) {
4199 int original_index = method_ordering->at(j);
4200 guarantee(original_index >= 0, "invalid method ordering index");
4201 guarantee(original_index < length, "invalid method ordering index");
4202 sum += original_index;
4203 }
4204 // Verify sum of indices 0,1,...,length-1
4205 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
4206 } else {
4207 guarantee(length == 0, "invalid method ordering length");
4208 }
4209 }
4210
4211 // Verify default methods
4212 if (default_methods() != nullptr) {
4213 Array<Method*>* methods = this->default_methods();
4214 for (int j = 0; j < methods->length(); j++) {
4215 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4216 }
4217 for (int j = 0; j < methods->length() - 1; j++) {
4218 Method* m1 = methods->at(j);
4219 Method* m2 = methods->at(j + 1);
4220 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4221 }
4222 }
4223
4224 // Verify JNI static field identifiers
4225 if (jni_ids() != nullptr) {
4226 jni_ids()->verify(this);
4227 }
4228
4229 // Verify other fields
4230 if (constants() != nullptr) {
4231 guarantee(constants()->is_constantPool(), "should be constant pool");
4232 }
4233 }
4234
4235 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
4236 Klass::oop_verify_on(obj, st);
4237 VerifyFieldClosure blk;
4238 obj->oop_iterate(&blk);
4239 }
4240
4241 // JNIid class for jfieldIDs only
4242 // Note to reviewers:
4243 // These JNI functions are just moved over to column 1 and not changed
4244 // in the compressed oops workspace.
4245 JNIid::JNIid(InstanceKlass* holder, int offset, JNIid* next) {
4246 _holder = holder;
4247 _offset = offset;
4248 _next = next;
4249 DEBUG_ONLY(_is_static_field_id = false;)
4250 }
4251
4252 JNIid* JNIid::find(int offset) {
4253 JNIid* current = this;
4254 while (current != nullptr) {
4255 if (current->offset() == offset) return current;
4256 current = current->next();
4257 }
4258 return nullptr;
4259 }
4260
4261 void JNIid::deallocate(JNIid* current) {
4262 while (current != nullptr) {
4263 JNIid* next = current->next();
4264 delete current;
4265 current = next;
4266 }
4267 }
4268
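// Verify that every JNIid in this chain belongs to 'holder' and, for static
// field ids, that the offset lies within the holder's static field area.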
4269 void JNIid::verify(InstanceKlass* holder) {
4270 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
4271 int end_field_offset;
4272 end_field_offset = first_field_offset + (holder->static_field_size() * wordSize);
4273
4274 JNIid* current = this;
4275 while (current != nullptr) {
4276 guarantee(current->holder() == holder, "Invalid klass in JNIid");
4277 #ifdef ASSERT
4278 int o = current->offset();
4279 if (current->is_static_field_id()) {
4280 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
4281 }
4282 #endif
4283 current = current->next();
4284 }
4285 }
4286
4287 void InstanceKlass::set_init_state(ClassState state) {
4288 #ifdef ASSERT
4289 bool good_state = in_aot_cache() ? (_init_state <= state)
4290 : (_init_state < state);
4291 assert(good_state || state == allocated, "illegal state transition");
4292 #endif
4293 assert(_init_thread == nullptr, "should be cleared before state change");
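// Use a release store so that threads that observe the new state also observe
// all writes made before this state transition.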
4294 AtomicAccess::release_store(&_init_state, state);
4295 }
4296
4297 #if INCLUDE_JVMTI
4298
4299 // RedefineClasses() support for previous versions
4300
4301 // Globally, there is at least one previous version of a class to walk
4302 // during class unloading, which is saved because old methods in the class
4303 // are still running. Otherwise the previous version list is cleaned up.
4304 bool InstanceKlass::_should_clean_previous_versions = false;
4305
4306 // Returns true if there are previous versions of a class for class
4307 // unloading only. Also resets the flag to false. purge_previous_version_list()
4308 // will set the flag to true if there are any left, i.e., if there's any
4309 // work to do for next time. This is to avoid the expensive code cache
4310 // walk in CLDG::clean_deallocate_lists().
4311 bool InstanceKlass::should_clean_previous_versions_and_reset() {
4312 bool ret = _should_clean_previous_versions;
4313 log_trace(redefine, class, iklass, purge)("Class unloading: should_clean_previous_versions = %s",
4314 ret ? "true" : "false");
4315 _should_clean_previous_versions = false;
4316 return ret;
4317 }
4318
4319 // This nulls out the jmethodID for all obsolete methods in the previous version of the 'klass'.
4320 // These obsolete methods only exist in the previous version and we're about to delete the memory for them.
4321 // The jmethodIDs for these are deallocated when we unload the class, so this doesn't remove them from the table.
4322 void InstanceKlass::clear_obsolete_jmethod_ids(InstanceKlass* klass) {
4323 Array<Method*>* method_refs = klass->methods();
4324 for (int k = 0; k < method_refs->length(); k++) {
4325 Method* method = method_refs->at(k);
4326 // Only need to clear obsolete methods.
4327 if (method != nullptr && method->is_obsolete()) {
4328 method->clear_jmethod_id();
4329 }
4330 }
4331 }
4332
4333 // Purge previous versions before adding new previous versions of the class and
4334 // during class unloading.
4335 void InstanceKlass::purge_previous_version_list() {
4336 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
4337 assert(has_been_redefined(), "Should only be called for main class");
4338
4339 // Quick exit.
4340 if (previous_versions() == nullptr) {
4341 return;
4342 }
4343
4344 // This klass has previous versions so see what we can cleanup
4345 // while it is safe to do so.
4346
4347 int deleted_count = 0; // leave debugging breadcrumbs
4348 int live_count = 0;
4349 ClassLoaderData* loader_data = class_loader_data();
4350 assert(loader_data != nullptr, "should never be null");
4351
4352 ResourceMark rm;
4353 log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());
4354
4355 // previous versions are linked together through the InstanceKlass
4356 InstanceKlass* pv_node = previous_versions();
4357 InstanceKlass* last = this;
4358 int version = 0;
4359
4360 // check the previous versions list
4361 for (; pv_node != nullptr; ) {
4362
4363 ConstantPool* pvcp = pv_node->constants();
4364 assert(pvcp != nullptr, "cp ref was unexpectedly cleared");
4365
4366 if (!pvcp->on_stack()) {
4367 // If the constant pool isn't on stack, none of the methods
4368 // are executing. Unlink this previous_version.
4369 // The previous version InstanceKlass is on the ClassLoaderData deallocate list
4370 // so will be deallocated during the next phase of class unloading.
4371 log_trace(redefine, class, iklass, purge)
4372 ("previous version " PTR_FORMAT " is dead.", p2i(pv_node));
4373 // Unlink from previous version list.
4374 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
4375 InstanceKlass* next = pv_node->previous_versions();
4376 clear_obsolete_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class
4377 pv_node->link_previous_versions(nullptr); // point next to null
4378 last->link_previous_versions(next);
4379 // Delete this node directly. Nothing is referring to it and we don't
4380 // want it to increase the counter for metadata to delete in CLDG.
4381 MetadataFactory::free_metadata(loader_data, pv_node);
4382 pv_node = next;
4383 deleted_count++;
4384 version++;
4385 continue;
4386 } else {
4387 assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder");
4388 guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
4389 live_count++;
4390 if (pvcp->in_aot_cache()) {
4391 // Shared previous versions can never be removed so no cleaning is needed.
4392 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node));
4393 } else {
4394 // Previous version alive, set that clean is needed for next time.
4395 _should_clean_previous_versions = true;
4396 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is alive", p2i(pv_node));
4397 }
4398 }
4399
4400 // next previous version
4401 last = pv_node;
4402 pv_node = pv_node->previous_versions();
4403 version++;
4404 }
4405 log_trace(redefine, class, iklass, purge)
4406 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
4407 }
4408
4409 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
4410 int emcp_method_count) {
4411 int obsolete_method_count = old_methods->length() - emcp_method_count;
4412
4413 if (emcp_method_count != 0 && obsolete_method_count != 0 &&
4414 _previous_versions != nullptr) {
4415 // We have a mix of obsolete and EMCP methods so we have to
4416 // clear out any matching EMCP method entries the hard way.
4417 int local_count = 0;
4418 for (int i = 0; i < old_methods->length(); i++) {
4419 Method* old_method = old_methods->at(i);
4420 if (old_method->is_obsolete()) {
4421 // only obsolete methods are interesting
4422 Symbol* m_name = old_method->name();
4423 Symbol* m_signature = old_method->signature();
4424
4425 // previous versions are linked together through the InstanceKlass
4426 int j = 0;
4427 for (InstanceKlass* prev_version = _previous_versions;
4428 prev_version != nullptr;
4429 prev_version = prev_version->previous_versions(), j++) {
4430
4431 Array<Method*>* method_refs = prev_version->methods();
4432 for (int k = 0; k < method_refs->length(); k++) {
4433 Method* method = method_refs->at(k);
4434
4435 if (!method->is_obsolete() &&
4436 method->name() == m_name &&
4437 method->signature() == m_signature) {
4438 // The current RedefineClasses() call has made all EMCP
4439 // versions of this method obsolete so mark it as obsolete
4440 log_trace(redefine, class, iklass, add)
4441 ("%s(%s): flush obsolete method @%d in version @%d",
4442 m_name->as_C_string(), m_signature->as_C_string(), k, j);
4443
4444 method->set_is_obsolete();
4445 break;
4446 }
4447 }
4448
4449 // The previous loop may not find a matching EMCP method, but
4450 // that doesn't mean that we can optimize and not go any
4451 // further back in the PreviousVersion generations. The EMCP
4452 // method for this generation could have already been made obsolete,
4453 // but there still may be an older EMCP method that has not
4454 // been made obsolete.
4455 }
4456
4457 if (++local_count >= obsolete_method_count) {
4458 // no more obsolete methods so bail out now
4459 break;
4460 }
4461 }
4462 }
4463 }
4464 }
4465
4466 // Save the scratch_class as the previous version if any of the methods are running.
4467 // The previous_versions are used to set breakpoints in EMCP methods and they are
4468 // also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions for this class
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions. An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool of this previous version of the class is not marked
  // as being on the stack, then none of its methods are on the stack either,
  // so we don't need to add it as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
    return;
  }

  // Add previous version if any methods are still running or if this is
  // a shared class which should never be removed.
  assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version");
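  // Push scratch_class onto the head of this klass' singly linked previous-version
  // list: scratch_class points at the old head and this klass points at scratch_class.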
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class);
  if (cp_ref->in_aot_cache()) {
    log_trace(redefine, class, iklass, add)("scratch class added; class is shared");
  } else {
    // We only set clean_previous_versions flag for processing during class
    // unloading for non-shared classes.
    _should_clean_previous_versions = true;
    log_trace(redefine, class, iklass, add)("scratch class added; one of its methods is on_stack.");
  }
} // end add_previous_version()

#endif // INCLUDE_JVMTI

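// Look up a method by its method idnum. Fast path: the method usually sits at
// index idnum in the methods array; otherwise fall back to a linear search.
// Returns null if no method with that idnum exists.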
Method* InstanceKlass::method_with_idnum(int idnum) const {
  Method* m = nullptr;
  if (idnum < methods()->length()) {
    m = methods()->at(idnum);
  }
  if (m == nullptr || m->method_idnum() != idnum) {
    for (int index = 0; index < methods()->length(); ++index) {
      m = methods()->at(index);
      if (m->method_idnum() == idnum) {
        return m;
      }
    }
    // None found, return null for the caller to handle.
    return nullptr;
  }
  return m;
}


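// As above, but match on the original method idnum, which an obsolete method
// keeps even when its current idnum differs.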
Method* InstanceKlass::method_with_orig_idnum(int idnum) const {
  if (idnum >= methods()->length()) {
    return nullptr;
  }
  Method* m = methods()->at(idnum);
  if (m != nullptr && m->orig_method_idnum() == idnum) {
    return m;
  }
  // Obsolete method idnum does not match the original idnum
  for (int index = 0; index < methods()->length(); ++index) {
    m = methods()->at(index);
    if (m->orig_method_idnum() == idnum) {
      return m;
    }
  }
  // None found, return null for the caller to handle.
  return nullptr;
}


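// Look up a method by its original idnum in a particular class version
// (this klass or one of its previous versions).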
Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) const {
  const InstanceKlass* holder = get_klass_version(version);
  if (holder == nullptr) {
    return nullptr; // The version of klass is gone, no method is found
  }
  return holder->method_with_orig_idnum(idnum);
}

#if INCLUDE_JVMTI
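// JVMTI support: accessors for the cached class file data; the length and
// bytes helpers delegate to VM_RedefineClasses.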
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
  return _cached_class_file;
}

jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}

unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}
#endif

// Advance one step in a preorder walk of the class hierarchy under the root
// class, following subklass()/next_sibling() links and backtracking through
// the super classes. Skips the current class's subclasses if requested.
void ClassHierarchyIterator::next() {
  assert(_current != nullptr, "required");
  if (_visit_subclasses && _current->subklass() != nullptr) {
    _current = _current->subklass();
    return; // visit next subclass
  }
  _visit_subclasses = true; // reset
  while (_current->next_sibling() == nullptr && _current != _root) {
    _current = _current->java_super(); // backtrack; no more sibling subclasses left
  }
  if (_current == _root) {
    // Iteration is over (back at root after backtracking). Invalidate the iterator.
    _current = nullptr;
    return;
  }
  _current = _current->next_sibling();
  return; // visit next sibling subclass
}
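// A minimal usage sketch (assuming the done(), klass() and skip_subclasses()
// accessors declared for ClassHierarchyIterator in instanceKlass.hpp):
//
//   for (ClassHierarchyIterator iter(root); !iter.done(); iter.next()) {
//     Klass* sub = iter.klass();        // current class in the walk
//     if (!is_interesting(sub)) {       // hypothetical filter
//       iter.skip_subclasses();         // prune the subtree under 'sub'
//     }
//   }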