1 /*
2 * Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotClassInitializer.hpp"
26 #include "cds/aotLinkedClassBulkLoader.hpp"
27 #include "cds/aotMetaspace.hpp"
28 #include "cds/archiveUtils.hpp"
29 #include "cds/cdsConfig.hpp"
30 #include "cds/cdsEnumKlass.hpp"
31 #include "cds/classListWriter.hpp"
32 #include "cds/heapShared.hpp"
33 #include "classfile/classFileParser.hpp"
34 #include "classfile/classFileStream.hpp"
35 #include "classfile/classLoader.hpp"
36 #include "classfile/classLoaderData.inline.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "classfile/moduleEntry.hpp"
39 #include "classfile/systemDictionary.hpp"
40 #include "classfile/systemDictionaryShared.hpp"
41 #include "classfile/verifier.hpp"
42 #include "classfile/vmClasses.hpp"
43 #include "classfile/vmSymbols.hpp"
44 #include "code/codeCache.hpp"
45 #include "code/dependencyContext.hpp"
46 #include "compiler/compilationPolicy.hpp"
47 #include "compiler/compileBroker.hpp"
48 #include "gc/shared/collectedHeap.inline.hpp"
49 #include "interpreter/bytecodeStream.hpp"
50 #include "interpreter/oopMapCache.hpp"
51 #include "interpreter/rewriter.hpp"
52 #include "jvm.h"
53 #include "jvmtifiles/jvmti.h"
54 #include "klass.inline.hpp"
55 #include "logging/log.hpp"
56 #include "logging/logMessage.hpp"
57 #include "logging/logStream.hpp"
58 #include "memory/allocation.inline.hpp"
59 #include "memory/iterator.inline.hpp"
60 #include "memory/metadataFactory.hpp"
61 #include "memory/metaspaceClosure.hpp"
62 #include "memory/oopFactory.hpp"
63 #include "memory/resourceArea.hpp"
64 #include "memory/universe.hpp"
65 #include "oops/constantPool.hpp"
66 #include "oops/fieldStreams.inline.hpp"
67 #include "oops/inlineKlass.hpp"
68 #include "oops/instanceClassLoaderKlass.hpp"
69 #include "oops/instanceKlass.inline.hpp"
70 #include "oops/instanceMirrorKlass.hpp"
71 #include "oops/instanceOop.hpp"
72 #include "oops/instanceStackChunkKlass.hpp"
73 #include "oops/klass.inline.hpp"
74 #include "oops/layoutKind.hpp"
75 #include "oops/markWord.hpp"
76 #include "oops/method.hpp"
77 #include "oops/oop.inline.hpp"
78 #include "oops/recordComponent.hpp"
79 #include "oops/refArrayKlass.hpp"
80 #include "oops/symbol.hpp"
81 #include "prims/jvmtiExport.hpp"
82 #include "prims/jvmtiRedefineClasses.hpp"
83 #include "prims/jvmtiThreadState.hpp"
84 #include "prims/methodComparator.hpp"
85 #include "runtime/arguments.hpp"
86 #include "runtime/atomicAccess.hpp"
87 #include "runtime/deoptimization.hpp"
88 #include "runtime/fieldDescriptor.inline.hpp"
89 #include "runtime/handles.inline.hpp"
90 #include "runtime/javaCalls.hpp"
91 #include "runtime/javaThread.inline.hpp"
92 #include "runtime/mutexLocker.hpp"
93 #include "runtime/orderAccess.hpp"
94 #include "runtime/os.inline.hpp"
95 #include "runtime/reflection.hpp"
96 #include "runtime/synchronizer.hpp"
97 #include "runtime/threads.hpp"
98 #include "services/classLoadingService.hpp"
99 #include "services/finalizerService.hpp"
100 #include "services/threadService.hpp"
101 #include "utilities/dtrace.hpp"
102 #include "utilities/events.hpp"
103 #include "utilities/macros.hpp"
104 #include "utilities/nativeStackPrinter.hpp"
105 #include "utilities/ostream.hpp"
106 #include "utilities/stringUtils.hpp"
107 #ifdef COMPILER1
108 #include "c1/c1_Compiler.hpp"
109 #endif
110 #if INCLUDE_JFR
111 #include "jfr/jfrEvents.hpp"
112 #endif
113
#ifdef DTRACE_ENABLED

// The DTrace probe generator emits upper-case probe macro names; map the
// lower-case suffixes used by the DTRACE_CLASSINIT_PROBE token pasting below
// onto those generated macros.
#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END

// Fire a class-initialization DTrace probe of the given 'type', passing the
// class name as raw UTF-8 bytes plus length (null-safe), the class loader
// oop, and the initiating thread type. Expands in a member-function context:
// it calls name() and class_loader() on the current InstanceKlass.
#define DTRACE_CLASSINIT_PROBE(type, thread_type)              \
  {                                                            \
    char* data = nullptr;                                      \
    int len = 0;                                               \
    Symbol* clss_name = name();                                \
    if (clss_name != nullptr) {                                \
      data = (char*)clss_name->bytes();                        \
      len = clss_name->utf8_length();                          \
    }                                                          \
    HOTSPOT_CLASS_INITIALIZATION_##type(                       \
      data, len, (void*)class_loader(), thread_type);          \
  }

// Same as DTRACE_CLASSINIT_PROBE but also reports a 'wait' value (used by
// probes that distinguish whether the thread had to wait for initialization).
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)   \
  {                                                            \
    char* data = nullptr;                                      \
    int len = 0;                                               \
    Symbol* clss_name = name();                                \
    if (clss_name != nullptr) {                                \
      data = (char*)clss_name->bytes();                        \
      len = clss_name->utf8_length();                          \
    }                                                          \
    HOTSPOT_CLASS_INITIALIZATION_##type(                       \
      data, len, (void*)class_loader(), thread_type, wait);    \
  }

#else // ndef DTRACE_ENABLED

// No-op stubs when DTrace support is not compiled in.
#define DTRACE_CLASSINIT_PROBE(type, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)

#endif // ndef DTRACE_ENABLED
157
158 void InlineLayoutInfo::metaspace_pointers_do(MetaspaceClosure* it) {
159 log_trace(cds)("Iter(InlineFieldInfo): %p", this);
160 it->push(&_klass);
161 }
162
// Debug convenience: dump this InlineLayoutInfo to the default tty stream.
void InlineLayoutInfo::print() const {
  print_on(tty);
}
166
// Print a multi-line, human-readable dump of this InlineLayoutInfo to 'st':
// the referenced klass (with its own nested dump, indented), the layout kind,
// and the null-marker offset.
void InlineLayoutInfo::print_on(outputStream* st) const {
  st->print_cr("_klass: " PTR_FORMAT, p2i(_klass));
  if (_klass != nullptr) {
    // Indent the nested klass dump under the "_klass:" header line.
    StreamIndentor si(st);
    _klass->print_on(st);
    st->cr();
  }

  st->print("_layout: ");
  LayoutKindHelper::print_on(_kind, st);
  st->cr();

  // NOTE(review): no trailing cr() after this field — presumably the caller
  // terminates the line; confirm against call sites.
  st->print("_null_marker_offset: %d", _null_marker_offset);
}
181
// Global switch: when false, instances of finalizable classes are not
// registered for finalization (presumably toggled by VM/JDK support code —
// the writer is outside this file; confirm at call sites of the setter).
bool InstanceKlass::_finalization_enabled = true;

static int call_class_initializer_counter = 0;   // for debugging
184
185 static inline bool is_class_loader(const Symbol* class_name,
186 const ClassFileParser& parser) {
187 assert(class_name != nullptr, "invariant");
188
189 if (class_name == vmSymbols::java_lang_ClassLoader()) {
190 return true;
191 }
192
193 if (vmClasses::ClassLoader_klass_is_loaded()) {
194 const Klass* const super_klass = parser.super_klass();
195 if (super_klass != nullptr) {
196 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
197 return true;
198 }
199 }
200 }
201 return false;
202 }
203
// Returns true if the field at 'index' is declared as a null-free inline type
// field, as recorded in that field's flags.
bool InstanceKlass::field_is_null_free_inline_type(int index) const {
  return field(index).field_flags().is_null_free_inline_type();
}
207
208 bool InstanceKlass::is_class_in_loadable_descriptors_attribute(Symbol* name) const {
209 if (_loadable_descriptors == nullptr) return false;
210 for (int i = 0; i < _loadable_descriptors->length(); i++) {
211 Symbol* class_name = _constants->symbol_at(_loadable_descriptors->at(i));
212 if (class_name == name) return true;
213 }
214 return false;
215 }
216
217 static inline bool is_stack_chunk_class(const Symbol* class_name,
218 const ClassLoaderData* loader_data) {
219 return (class_name == vmSymbols::jdk_internal_vm_StackChunk() &&
220 loader_data->is_the_null_class_loader_data());
221 }
222
223 // private: called to verify that k is a static member of this nest.
224 // We know that k is an instance class in the same package and hence the
225 // same classloader.
226 bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
227 assert(!is_hidden(), "unexpected hidden class");
228 if (_nest_members == nullptr || _nest_members == Universe::the_empty_short_array()) {
229 if (log_is_enabled(Trace, class, nestmates)) {
230 ResourceMark rm(current);
231 log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
232 k->external_name(), this->external_name());
233 }
234 return false;
235 }
236
237 if (log_is_enabled(Trace, class, nestmates)) {
238 ResourceMark rm(current);
239 log_trace(class, nestmates)("Checking nest membership of %s in %s",
240 k->external_name(), this->external_name());
241 }
242
243 // Check for the named class in _nest_members.
244 // We don't resolve, or load, any classes.
245 for (int i = 0; i < _nest_members->length(); i++) {
246 int cp_index = _nest_members->at(i);
247 Symbol* name = _constants->klass_name_at(cp_index);
248 if (name == k->name()) {
249 log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
250 return true;
251 }
252 }
253 log_trace(class, nestmates)("- class is NOT a nest member!");
254 return false;
255 }
256
// Called to verify that k is a permitted subclass of this class.
// The incoming stringStream is used to format the messages for error logging and for the caller
// to use for exception throwing.
// Checks, in order: same module; same package if k is non-public; and finally
// that k's name appears in this class's PermittedSubclasses attribute. On any
// failure, 'ss' holds a human-readable reason and false is returned.
bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k, stringStream& ss) const {
  Thread* current = Thread::current();
  assert(k != nullptr, "sanity check");
  assert(_permitted_subclasses != nullptr && _permitted_subclasses != Universe::the_empty_short_array(),
         "unexpected empty _permitted_subclasses array");

  if (log_is_enabled(Trace, class, sealed)) {
    ResourceMark rm(current);
    log_trace(class, sealed)("Checking for permitted subclass %s in %s",
                             k->external_name(), this->external_name());
  }

  // Check that the class and its super are in the same module.
  if (k->module() != this->module()) {
    ss.print("Failed same module check: subclass %s is in module '%s' with loader %s, "
             "and sealed class %s is in module '%s' with loader %s",
             k->external_name(),
             k->module()->name_as_C_string(),
             k->module()->loader_data()->loader_name_and_id(),
             this->external_name(),
             this->module()->name_as_C_string(),
             this->module()->loader_data()->loader_name_and_id());
    log_trace(class, sealed)(" - %s", ss.as_string());
    return false;
  }

  // A non-public subclass must additionally live in the same package as the
  // sealed class.
  if (!k->is_public() && !is_same_class_package(k)) {
    ss.print("Failed same package check: non-public subclass %s is in package '%s' with classloader %s, "
             "and sealed class %s is in package '%s' with classloader %s",
             k->external_name(),
             k->package() != nullptr ? k->package()->name()->as_C_string() : "unnamed",
             k->module()->loader_data()->loader_name_and_id(),
             this->external_name(),
             this->package() != nullptr ? this->package()->name()->as_C_string() : "unnamed",
             this->module()->loader_data()->loader_name_and_id());
    log_trace(class, sealed)(" - %s", ss.as_string());
    return false;
  }

  // Finally, check that k's name is actually listed; no class resolution or
  // loading is performed here — this is a pure symbol comparison.
  for (int i = 0; i < _permitted_subclasses->length(); i++) {
    int cp_index = _permitted_subclasses->at(i);
    Symbol* name = _constants->klass_name_at(cp_index);
    if (name == k->name()) {
      log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
      return true;
    }
  }

  ss.print("Failed listed permitted subclass check: class %s is not a permitted subclass of %s",
           k->external_name(), this->external_name());
  log_trace(class, sealed)(" - %s", ss.as_string());
  return false;
}
313
// Return nest-host class, resolving, validating and saving it if needed.
// In cases where this is called from a thread that cannot do classloading
// (such as a native JIT thread) then we simply return null, which in turn
// causes the access check to return false. Such code will retry the access
// from a more suitable environment later. Otherwise the _nest_host is always
// set once this method returns.
// Any errors from nest-host resolution must be preserved so they can be queried
// from higher-level access checking code, and reported as part of access checking
// exceptions.
// VirtualMachineErrors are propagated with a null return.
// Under any conditions where the _nest_host can be set to non-null the resulting
// value of it and, if applicable, the nest host resolution/validation error,
// are idempotent.
InstanceKlass* InstanceKlass::nest_host(TRAPS) {
  // Fast path: already resolved (or previously defaulted to self).
  InstanceKlass* nest_host_k = _nest_host;
  if (nest_host_k != nullptr) {
    return nest_host_k;
  }

  ResourceMark rm(THREAD);

  // need to resolve and save our nest-host class.
  if (_nest_host_index != 0) { // we have a real nest_host
    // Before trying to resolve check if we're in a suitable context.
    // If the CP entry is already resolved to a klass, no classloading is
    // needed, so even a non-Java-calling thread may proceed.
    bool can_resolve = THREAD->can_call_java();
    if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
      log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
                                  this->external_name());
      return nullptr; // sentinel to say "try again from a different context"
    }

    log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
                                this->external_name(),
                                _constants->klass_name_at(_nest_host_index)->as_C_string());

    Klass* k = _constants->klass_at(_nest_host_index, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
        return nullptr; // propagate VMEs
      }
      // Record the resolution failure so later access-check failures can
      // report the underlying cause, then clear it — resolution errors are
      // not propagated from here.
      stringStream ss;
      char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
      ss.print("Nest host resolution of %s with host %s failed: ",
               this->external_name(), target_host_class);
      java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
      constantPoolHandle cph(THREAD, constants());
      SystemDictionary::add_nest_host_error(cph, _nest_host_index, ss);
      CLEAR_PENDING_EXCEPTION;

      log_trace(class, nestmates)("%s", ss.base());
    } else {
      // A valid nest-host is an instance class in the current package that lists this
      // class as a nest member. If any of these conditions are not met the class is
      // its own nest-host.
      const char* error = nullptr;

      // JVMS 5.4.4 indicates package check comes first
      if (is_same_class_package(k)) {
        // Now check actual membership. We can't be a member if our "host" is
        // not an instance class.
        if (k->is_instance_klass()) {
          nest_host_k = InstanceKlass::cast(k);
          bool is_member = nest_host_k->has_nest_member(THREAD, this);
          if (is_member) {
            _nest_host = nest_host_k; // save resolved nest-host value

            log_trace(class, nestmates)("Resolved nest-host of %s to %s",
                                        this->external_name(), k->external_name());
            return nest_host_k;
          } else {
            error = "current type is not listed as a nest member";
          }
        } else {
          error = "host is not an instance class";
        }
      } else {
        error = "types are in different packages";
      }

      // something went wrong, so record what and log it
      {
        stringStream ss;
        ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
                 this->external_name(),
                 this->class_loader_data()->loader_name_and_id(),
                 k->external_name(),
                 k->class_loader_data()->loader_name_and_id(),
                 error);
        constantPoolHandle cph(THREAD, constants());
        SystemDictionary::add_nest_host_error(cph, _nest_host_index, ss);
        log_trace(class, nestmates)("%s", ss.base());
      }
    }
  } else {
    log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
                                this->external_name());
  }

  // Either not in an explicit nest, or else an error occurred, so
  // the nest-host is set to `this`. Any thread that sees this assignment
  // will also see any setting of nest_host_error(), if applicable.
  return (_nest_host = this);
}
417
// Dynamic nest member support: set this class's nest host to the given class.
// This occurs as part of the class definition, as soon as the instanceKlass
// has been created and doesn't require further resolution. The code:
//    lookup().defineHiddenClass(bytes_for_X, NESTMATE);
// results in:
//    class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
// If it has an explicit _nest_host_index or _nest_members, these will be ignored.
// We also know the "host" is a valid nest-host in the same package so we can
// assert some of those facts.
void InstanceKlass::set_nest_host(InstanceKlass* host) {
  assert(is_hidden(), "must be a hidden class");
  assert(host != nullptr, "null nest host specified");
  assert(_nest_host == nullptr, "current class has resolved nest-host");
  assert(nest_host_error() == nullptr, "unexpected nest host resolution error exists: %s",
         nest_host_error());
  // The proposed host is valid if it has never been resolved (and has no
  // explicit NestHost attribute), or if it resolved to itself.
  assert((host->_nest_host == nullptr && host->_nest_host_index == 0) ||
         (host->_nest_host == host), "proposed host is not a valid nest-host");
  // Can't assert this as package is not set yet:
  // assert(is_same_class_package(host), "proposed host is in wrong package");

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm;
    const char* msg = "";
    // a hidden class does not expect a statically defined nest-host
    if (_nest_host_index > 0) {
      msg = "(the NestHost attribute in the current class is ignored)";
    } else if (_nest_members != nullptr && _nest_members != Universe::the_empty_short_array()) {
      msg = "(the NestMembers attribute in the current class is ignored)";
    }
    log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
                                this->external_name(),
                                host->external_name(),
                                msg);
  }
  // set dynamic nest host
  _nest_host = host;
  // Record dependency to keep nest host from being unloaded before this class.
  ClassLoaderData* this_key = class_loader_data();
  assert(this_key != nullptr, "sanity");
  this_key->record_dependency(host);
}
459
// check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
// or we are k's nest_host - all of which is covered by comparing the two
// resolved_nest_hosts.
// Any exceptions (i.e. VMEs) are propagated.
bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {

  assert(this != k, "this should be handled by higher-level code");

  // Per JVMS 5.4.4 we first resolve and validate the current class, then
  // the target class k.

  // CHECK_false: a pending exception (VME) from resolution returns false and
  // propagates; a null return without exception means "retry later" (see
  // nest_host()) and also denies access for now.
  InstanceKlass* cur_host = nest_host(CHECK_false);
  if (cur_host == nullptr) {
    return false;
  }

  Klass* k_nest_host = k->nest_host(CHECK_false);
  if (k_nest_host == nullptr) {
    return false;
  }

  // Nestmate access holds exactly when both resolve to the same host.
  bool access = (cur_host == k_nest_host);

  ResourceMark rm(THREAD);
  log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
                              this->external_name(),
                              access ? "" : "NOT ",
                              k->external_name());
  return access;
}
490
491 const char* InstanceKlass::nest_host_error() {
492 if (_nest_host_index == 0) {
493 return nullptr;
494 } else {
495 constantPoolHandle cph(Thread::current(), constants());
496 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
497 }
498 }
499
// Allocate and construct the right InstanceKlass subtype for a parsed class
// file: InstanceRefKlass for java.lang.ref.Reference subclasses,
// InstanceMirrorKlass for java.lang.Class, InstanceStackChunkKlass for the
// boot-loaded jdk.internal.vm.StackChunk, InstanceClassLoaderKlass for class
// loaders, InlineKlass for inline (value) types, and plain InstanceKlass
// otherwise. Returns null with a pending exception on allocation failure.
InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
  // Total metaspace footprint in words, including vtable, itable and oop maps.
  const int size = InstanceKlass::size(parser.vtable_size(),
                                       parser.itable_size(),
                                       nonstatic_oop_map_size(parser.total_oop_map_count()),
                                       parser.is_interface(),
                                       parser.is_inline_type());

  const Symbol* const class_name = parser.class_name();
  assert(class_name != nullptr, "invariant");
  ClassLoaderData* loader_data = parser.loader_data();
  assert(loader_data != nullptr, "invariant");

  InstanceKlass* ik;

  // Allocation
  if (parser.is_instance_ref_klass()) {
    // java.lang.ref.Reference
    ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
  } else if (class_name == vmSymbols::java_lang_Class()) {
    // mirror - java.lang.Class
    ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
  } else if (is_stack_chunk_class(class_name, loader_data)) {
    // stack chunk
    ik = new (loader_data, size, THREAD) InstanceStackChunkKlass(parser);
  } else if (is_class_loader(class_name, parser)) {
    // class loader - java.lang.ClassLoader
    ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
  } else if (parser.is_inline_type()) {
    // inline type
    ik = new (loader_data, size, THREAD) InlineKlass(parser);
  } else {
    // normal
    ik = new (loader_data, size, THREAD) InstanceKlass(parser);
  }

  assert(ik == nullptr || CompressedKlassPointers::is_encodable(ik),
         "Klass " PTR_FORMAT "needs a narrow Klass ID, but is not encodable", p2i(ik));

  // Check for pending exception before adding to the loader data and incrementing
  // class count. Can get OOM here.
  if (HAS_PENDING_EXCEPTION) {
    return nullptr;
  }

#ifdef ASSERT
  // Sanity-check that the embedded tables all fall inside the allocation.
  ik->bounds_check((address) ik->start_of_vtable(), false, size);
  ik->bounds_check((address) ik->start_of_itable(), false, size);
  ik->bounds_check((address) ik->end_of_itable(), true, size);
  ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size);
#endif //ASSERT
  return ik;
}
552
#ifndef PRODUCT
// Debug-only sanity check that 'addr' lies within this klass's allocation.
// 'edge_ok' permits addr to coincide with either boundary. 'size_in_bytes'
// appears to actually be a word count — it is scaled by sizeof(intptr_t)
// below; a negative value means "use size()". TODO(review): confirm the
// parameter's units against callers and consider renaming.
// Returns true when in bounds; otherwise prints diagnostics and returns false.
bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const {
  const char* bad = nullptr;
  address end = nullptr;
  if (addr < (address)this) {
    bad = "before";
  } else if (addr == (address)this) {
    if (edge_ok) return true;
    bad = "just before";
  // Note: 'end' is computed (and assigned) inside this comparison, so it is
  // valid for the 'addr > end' test below even when this branch is not taken.
  } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) {
    if (edge_ok) return true;
    bad = "just after";
  } else if (addr > end) {
    bad = "after";
  } else {
    return true;
  }
  tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]",
                bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end);
  // Crank up verbosity and dump the klass to aid debugging the violation.
  Verbose = WizardMode = true; this->print(); //@@
  return false;
}
#endif //PRODUCT
576
577 // copy method ordering from resource area to Metaspace
578 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
579 if (m != nullptr) {
580 // allocate a new array and copy contents (memcpy?)
581 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
582 for (int i = 0; i < m->length(); i++) {
583 _method_ordering->at_put(i, m->at(i));
584 }
585 } else {
586 _method_ordering = Universe::the_empty_int_array();
587 }
588 }
589
// create a new array of vtable_indices for default methods
// Allocates a Metaspace-backed int array of length 'len', installs it as this
// class's default-vtable-indices array (which must not already exist), and
// returns it. On allocation failure, returns null with a pending exception
// (via CHECK_NULL) and installs nothing.
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == nullptr, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}
597
598
// CDS-only default constructor: used when materializing an InstanceKlass for
// archive dumping or loading rather than from a parsed class file.
InstanceKlass::InstanceKlass() {
  assert(CDSConfig::is_dumping_static_archive() || CDSConfig::is_using_archive(), "only for CDS");
}
602
// Construct an InstanceKlass from a parsed class file. Pointer-valued fields
// not listed in the initializer list rely on the allocator having zeroed the
// underlying memory (see the _methods assert below). The klass is left in the
// 'allocated' init state; linking and initialization happen later.
InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, markWord prototype_header, ReferenceType reference_type) :
  Klass(kind, prototype_header),
  _nest_members(nullptr),
  _nest_host(nullptr),
  _permitted_subclasses(nullptr),
  _record_components(nullptr),
  _static_field_size(parser.static_field_size()),
  _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
  _itable_len(parser.itable_size()),
  _nest_host_index(0),
  _init_state(allocated),
  _reference_type(reference_type),
  _acmp_maps_offset(0),
  _init_thread(nullptr),
  _inline_layout_info_array(nullptr),
  _loadable_descriptors(nullptr),
  _acmp_maps_array(nullptr),
  _adr_inline_klass_members(nullptr)
{
  set_vtable_length(parser.vtable_size());
  set_access_flags(parser.access_flags());
  if (parser.is_hidden()) set_is_hidden();
  // 'false' — instances of this klass are not arrays (instance layout helper).
  set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
                                                  false));
  if (parser.has_inlined_fields()) {
    set_has_inlined_fields();
  }

  assert(nullptr == _methods, "underlying memory not zeroed?");
  assert(is_instance_klass(), "is layout incorrect?");
  assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
}
635
636 void InstanceKlass::set_is_cloneable() {
637 if (name() == vmSymbols::java_lang_invoke_MemberName()) {
638 assert(is_final(), "no subclasses allowed");
639 // MemberName cloning should not be intrinsified and always happen in JVM_Clone.
640 } else if (reference_type() != REF_NONE) {
641 // Reference cloning should not be intrinsified and always happen in JVM_Clone.
642 } else {
643 set_is_cloneable_fast();
644 }
645 }
646
647 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
648 Array<Method*>* methods) {
649 if (methods != nullptr && methods != Universe::the_empty_method_array() &&
650 !methods->in_aot_cache()) {
651 for (int i = 0; i < methods->length(); i++) {
652 Method* method = methods->at(i);
653 if (method == nullptr) continue; // maybe null if error processing
654 // Only want to delete methods that are not executing for RedefineClasses.
655 // The previous version will point to them so they're not totally dangling
656 assert (!method->on_stack(), "shouldn't be called with methods on stack");
657 MetadataFactory::free_metadata(loader_data, method);
658 }
659 MetadataFactory::free_array<Method*>(loader_data, methods);
660 }
661 }
662
663 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
664 const InstanceKlass* super_klass,
665 Array<InstanceKlass*>* local_interfaces,
666 Array<InstanceKlass*>* transitive_interfaces) {
667 // Only deallocate transitive interfaces if not empty, same as super class
668 // or same as local interfaces. See code in parseClassFile.
669 Array<InstanceKlass*>* ti = transitive_interfaces;
670 if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
671 // check that the interfaces don't come from super class
672 Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
673 super_klass->transitive_interfaces();
674 if (ti != sti && ti != nullptr && !ti->in_aot_cache()) {
675 MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
676 }
677 }
678
679 // local interfaces can be empty
680 if (local_interfaces != Universe::the_empty_instance_klass_array() &&
681 local_interfaces != nullptr && !local_interfaces->in_aot_cache()) {
682 MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
683 }
684 }
685
686 void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
687 Array<RecordComponent*>* record_components) {
688 if (record_components != nullptr && !record_components->in_aot_cache()) {
689 for (int i = 0; i < record_components->length(); i++) {
690 RecordComponent* record_component = record_components->at(i);
691 MetadataFactory::free_metadata(loader_data, record_component);
692 }
693 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
694 }
695 }
696
697 // This function deallocates the metadata and C heap pointers that the
698 // InstanceKlass points to.
699 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
700 // Orphan the mirror first, CMS thinks it's still live.
701 if (java_mirror() != nullptr) {
702 java_lang_Class::set_klass(java_mirror(), nullptr);
703 }
704
705 // Also remove mirror from handles
706 loader_data->remove_handle(_java_mirror);
707
708 // Need to take this class off the class loader data list.
709 loader_data->remove_class(this);
710
711 // The array_klass for this class is created later, after error handling.
712 // For class redefinition, we keep the original class so this scratch class
713 // doesn't have an array class. Either way, assert that there is nothing
714 // to deallocate.
715 assert(array_klasses() == nullptr, "array classes shouldn't be created for this class yet");
716
717 // Release C heap allocated data that this points to, which includes
718 // reference counting symbol names.
719 // Can't release the constant pool or MethodData C heap data here because the constant
720 // pool can be deallocated separately from the InstanceKlass for default methods and
721 // redefine classes. MethodData can also be released separately.
722 release_C_heap_structures(/* release_sub_metadata */ false);
723
724 deallocate_methods(loader_data, methods());
725 set_methods(nullptr);
726
727 deallocate_record_components(loader_data, record_components());
728 set_record_components(nullptr);
729
730 if (method_ordering() != nullptr &&
731 method_ordering() != Universe::the_empty_int_array() &&
732 !method_ordering()->in_aot_cache()) {
733 MetadataFactory::free_array<int>(loader_data, method_ordering());
734 }
735 set_method_ordering(nullptr);
736
737 // default methods can be empty
738 if (default_methods() != nullptr &&
739 default_methods() != Universe::the_empty_method_array() &&
740 !default_methods()->in_aot_cache()) {
741 MetadataFactory::free_array<Method*>(loader_data, default_methods());
742 }
743 // Do NOT deallocate the default methods, they are owned by superinterfaces.
744 set_default_methods(nullptr);
745
746 // default methods vtable indices can be empty
747 if (default_vtable_indices() != nullptr &&
748 !default_vtable_indices()->in_aot_cache()) {
749 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
750 }
751 set_default_vtable_indices(nullptr);
752
753
754 // This array is in Klass, but remove it with the InstanceKlass since
755 // this place would be the only caller and it can share memory with transitive
756 // interfaces.
757 if (secondary_supers() != nullptr &&
758 secondary_supers() != Universe::the_empty_klass_array() &&
759 // see comments in compute_secondary_supers about the following cast
760 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
761 !secondary_supers()->in_aot_cache()) {
762 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
763 }
764 set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);
765
766 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
767 set_transitive_interfaces(nullptr);
768 set_local_interfaces(nullptr);
769
770 if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->in_aot_cache()) {
771 MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
772 }
773 set_fieldinfo_stream(nullptr);
774
775 if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->in_aot_cache()) {
776 MetadataFactory::free_array<u1>(loader_data, fieldinfo_search_table());
777 }
778 set_fieldinfo_search_table(nullptr);
779
780 if (fields_status() != nullptr && !fields_status()->in_aot_cache()) {
781 MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
782 }
783 set_fields_status(nullptr);
784
785 if (inline_layout_info_array() != nullptr) {
786 MetadataFactory::free_array<InlineLayoutInfo>(loader_data, inline_layout_info_array());
787 }
788 set_inline_layout_info_array(nullptr);
789
790 // If a method from a redefined class is using this constant pool, don't
791 // delete it, yet. The new class's previous version will point to this.
792 if (constants() != nullptr) {
793 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
794 if (!constants()->in_aot_cache()) {
795 HeapShared::remove_scratch_resolved_references(constants());
796 MetadataFactory::free_metadata(loader_data, constants());
797 }
798 // Delete any cached resolution errors for the constant pool
799 SystemDictionary::delete_resolution_error(constants());
800
801 set_constants(nullptr);
802 }
803
804 if (inner_classes() != nullptr &&
805 inner_classes() != Universe::the_empty_short_array() &&
806 !inner_classes()->in_aot_cache()) {
807 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
808 }
809 set_inner_classes(nullptr);
810
811 if (nest_members() != nullptr &&
812 nest_members() != Universe::the_empty_short_array() &&
813 !nest_members()->in_aot_cache()) {
814 MetadataFactory::free_array<jushort>(loader_data, nest_members());
815 }
816 set_nest_members(nullptr);
817
818 if (permitted_subclasses() != nullptr &&
819 permitted_subclasses() != Universe::the_empty_short_array() &&
820 !permitted_subclasses()->in_aot_cache()) {
821 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
822 }
823 set_permitted_subclasses(nullptr);
824
825 if (loadable_descriptors() != nullptr &&
826 loadable_descriptors() != Universe::the_empty_short_array() &&
827 !loadable_descriptors()->in_aot_cache()) {
828 MetadataFactory::free_array<jushort>(loader_data, loadable_descriptors());
829 }
830 set_loadable_descriptors(nullptr);
831
832 if (acmp_maps_array() != nullptr) {
833 MetadataFactory::free_array<int>(loader_data, acmp_maps_array());
834 }
835 set_acmp_maps_array(nullptr);
836
837 // We should deallocate the Annotations instance if it's not in shared spaces.
838 if (annotations() != nullptr && !annotations()->in_aot_cache()) {
839 MetadataFactory::free_metadata(loader_data, annotations());
840 }
841 set_annotations(nullptr);
842
843 SystemDictionaryShared::handle_class_unloading(this);
844
845 #if INCLUDE_CDS_JAVA_HEAP
846 if (CDSConfig::is_dumping_heap()) {
847 HeapShared::remove_scratch_objects(this);
848 }
849 #endif
850 }
851
852 bool InstanceKlass::is_record() const {
853 return _record_components != nullptr &&
854 is_final() &&
855 super() == vmClasses::Record_klass();
856 }
857
858 bool InstanceKlass::is_sealed() const {
859 return _permitted_subclasses != nullptr &&
860 _permitted_subclasses != Universe::the_empty_short_array();
861 }
862
863 // JLS 8.9: An enum class is either implicitly final and derives
864 // from java.lang.Enum, or else is implicitly sealed to its
865 // anonymous subclasses. This query detects both kinds.
866 // It does not validate the finality or
867 // sealing conditions: it merely checks for a super of Enum.
868 // This is sufficient for recognizing well-formed enums.
869 bool InstanceKlass::is_enum_subclass() const {
870 InstanceKlass* s = super();
871 return (s == vmClasses::Enum_klass() ||
872 (s != nullptr && s->super() == vmClasses::Enum_klass()));
873 }
874
875 bool InstanceKlass::should_be_initialized() const {
876 return !is_initialized();
877 }
878
879 // Static size helper
880 int InstanceKlass::size(int vtable_length,
881 int itable_length,
882 int nonstatic_oop_map_size,
883 bool is_interface,
884 bool is_inline_type) {
885 return align_metadata_size(header_size() +
886 vtable_length +
887 itable_length +
888 nonstatic_oop_map_size +
889 (is_interface ? (int)sizeof(Klass*) / wordSize : 0) +
890 (is_inline_type ? (int)sizeof(InlineKlass::Members) / wordSize : 0));
891 }
892
893 int InstanceKlass::size() const {
894 return size(vtable_length(),
895 itable_length(),
896 nonstatic_oop_map_size(),
897 is_interface(),
898 is_inline_klass());
899 }
900
901 klassItable InstanceKlass::itable() const {
902 return klassItable(const_cast<InstanceKlass*>(this));
903 }
904
905 // JVMTI spec thinks there are signers and protection domain in the
906 // instanceKlass. These accessors pretend these fields are there.
907 // The hprof specification also thinks these fields are in InstanceKlass.
908 oop InstanceKlass::protection_domain() const {
909 // return the protection_domain from the mirror
910 return java_lang_Class::protection_domain(java_mirror());
911 }
912
913 objArrayOop InstanceKlass::signers() const {
914 // return the signers from the mirror
915 return java_lang_Class::signers(java_mirror());
916 }
917
// Returns the object used to serialize class initialization, stored in the
// java.lang.Class mirror. The lock may be null once the class has reached a
// terminal (initialized or in-error) state and the lock has been cleared by
// fence_and_clear_init_lock().
oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert(lock != nullptr || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}
927
// Set the initialization lock to null so the object can be GC'ed. Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return. For preempted vthreads we keep the oop protected
// in the ObjectMonitor (see ObjectMonitor::set_object_strong()).
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  OrderAccess::storestore();
  java_lang_Class::clear_init_lock(java_mirror());
  // Only legal once the class has left the not-initialized states.
  assert(!is_not_initialized(), "class must be initialized now");
}
939
// RAII helper: while in scope, marks the thread as being inside a
// preemptable class-initialization call, saving and restoring the previous
// flag (and, in debug builds, the previously recorded klass) so that nested
// initialization calls unwind correctly.
class PreemptableInitCall {
  JavaThread* _thread;
  bool _previous;                              // saved at_preemptable_init flag
  DEBUG_ONLY(InstanceKlass* _previous_klass;)  // saved klass being initialized (debug only)
public:
  PreemptableInitCall(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
    // Save the outer state first, then install ours.
    _previous = thread->at_preemptable_init();
    _thread->set_at_preemptable_init(true);
    DEBUG_ONLY(_previous_klass = _thread->preempt_init_klass();)
    DEBUG_ONLY(_thread->set_preempt_init_klass(ik));
  }
  ~PreemptableInitCall() {
    // Restore the outer state on scope exit.
    _thread->set_at_preemptable_init(_previous);
    DEBUG_ONLY(_thread->set_preempt_init_klass(_previous_klass));
  }
};
956
957 void InstanceKlass::initialize_preemptable(TRAPS) {
958 if (this->should_be_initialized()) {
959 PreemptableInitCall pic(THREAD, this);
960 initialize_impl(THREAD);
961 } else {
962 assert(is_initialized(), "sanity check");
963 }
964 }
965
966 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
967 // process. The step comments refers to the procedure described in that section.
968 // Note: implementation moved to static method to expose the this pointer.
969 void InstanceKlass::initialize(TRAPS) {
970 if (this->should_be_initialized()) {
971 initialize_impl(CHECK);
972 // Note: at this point the class may be initialized
973 // OR it may be in the state of being initialized
974 // in case of recursive initialization!
975 } else {
976 assert(is_initialized(), "sanity check");
977 }
978 }
979
#ifdef ASSERT
// Debug-only invariant check for aot-initialized classes: no <clinit> may
// need to run for this class at runtime. Recursively requires every super
// class to be initialized already, and every uninitialized local super
// interface to have no <clinit> that would need execution.
void InstanceKlass::assert_no_clinit_will_run_for_aot_initialized_class() const {
  assert(has_aot_initialized_mirror(), "must be");

  InstanceKlass* s = super();
  if (s != nullptr) {
    // ResourceMark for external_name() in the assert message below.
    DEBUG_ONLY(ResourceMark rm);
    assert(s->is_initialized(), "super class %s of aot-inited class %s must have been initialized",
           s->external_name(), external_name());
    s->assert_no_clinit_will_run_for_aot_initialized_class();
  }

  Array<InstanceKlass*>* interfaces = local_interfaces();
  int len = interfaces->length();
  for (int i = 0; i < len; i++) {
    InstanceKlass* intf = interfaces->at(i);
    if (!intf->is_initialized()) {
      ResourceMark rm;
      // Note: an interface needs to be marked as is_initialized() only if
      // - it has a <clinit>
      // - it has declared a default method.
      assert(!intf->interface_needs_clinit_execution_as_super(/*also_check_supers*/false),
             "uninitialized super interface %s of aot-inited class %s must not have <clinit>",
             intf->external_name(), external_name());
    }
  }
}
#endif
1008
#if INCLUDE_CDS
// early_init -- we are moving this class into the fully_initialized state before the
// JVM is able to execute any bytecodes. See AOTLinkedClassBulkLoader::is_initializing_classes_early().
void InstanceKlass::initialize_with_aot_initialized_mirror(bool early_init, TRAPS) {
  assert(has_aot_initialized_mirror(), "must be");
  assert(CDSConfig::is_loading_heap(), "must be");
  assert(CDSConfig::is_using_aot_linked_classes(), "must be");
  assert_no_clinit_will_run_for_aot_initialized_class();

  if (is_initialized()) {
    return;
  }

  if (log_is_enabled(Info, aot, init)) {
    ResourceMark rm;
    log_info(aot, init)("%s (aot-inited%s)", external_name(), early_init ? ", early" : "");
  }

  if (is_runtime_setup_required()) {
    assert(!early_init, "must not call");
    // Need to take the slow path, which will call the runtimeSetup() function instead
    // of <clinit>
    initialize(CHECK);
    return;
  }

  LogTarget(Info, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm(THREAD);
    LogStream ls(lt);
    ls.print("%d Initializing ", call_class_initializer_counter++);
    name()->print_value_on(&ls);
    ls.print_cr("(aot-inited) (" PTR_FORMAT ") by thread \"%s\"",
                p2i(this), THREAD->name());
  }

  if (early_init) {
    // Early case: no bytecode has executed yet, so the state can be flipped
    // directly without taking the init lock or notifying waiters.
    precond(AOTLinkedClassBulkLoader::is_initializing_classes_early());
    precond(is_linked());
    precond(init_thread() == nullptr);
    set_init_state(fully_initialized);
    fence_and_clear_init_lock();
    return;
  }

  link_class(CHECK);

#ifdef ASSERT
  {
    // Sanity-check the state under the init lock before publishing it.
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, THREAD);
    assert(!is_initialized(), "sanity");
    assert(!is_being_initialized(), "sanity");
    assert(!is_in_error_state(), "sanity");
  }
#endif

  set_init_thread(THREAD);
  set_initialization_state_and_notify(fully_initialized, CHECK);
}
#endif
1070
1071 bool InstanceKlass::verify_code(TRAPS) {
1072 // 1) Verify the bytecodes
1073 return Verifier::verify(this, should_verify_class(), THREAD);
1074 }
1075
1076 static void load_classes_from_loadable_descriptors_attribute(InstanceKlass *ik, TRAPS) {
1077 if (ik->loadable_descriptors() != Universe::the_empty_short_array() && PreloadClasses) {
1078 ResourceMark rm(THREAD);
1079 HandleMark hm(THREAD);
1080 for (int i = 0; i < ik->loadable_descriptors()->length(); i++) {
1081 Symbol* sig = ik->constants()->symbol_at(ik->loadable_descriptors()->at(i));
1082 if (!Signature::has_envelope(sig)) continue;
1083 TempNewSymbol class_name = Signature::strip_envelope(sig);
1084 if (class_name == ik->name()) continue;
1085 log_info(class, preload)("Preloading of class %s during linking of class %s "
1086 "because of the class is listed in the LoadableDescriptors attribute",
1087 sig->as_C_string(), ik->name()->as_C_string());
1088 oop loader = ik->class_loader();
1089 Klass* klass = SystemDictionary::resolve_or_null(class_name,
1090 Handle(THREAD, loader), THREAD);
1091 if (HAS_PENDING_EXCEPTION) {
1092 CLEAR_PENDING_EXCEPTION;
1093 }
1094 if (klass != nullptr) {
1095 log_info(class, preload)("Preloading of class %s during linking of class %s "
1096 "(cause: LoadableDescriptors attribute) succeeded",
1097 class_name->as_C_string(), ik->name()->as_C_string());
1098 if (!klass->is_inline_klass()) {
1099 // Non value class are allowed by the current spec, but it could be an indication
1100 // of an issue so let's log a warning
1101 log_info(class, preload)("Preloading of class %s during linking of class %s "
1102 "(cause: LoadableDescriptors attribute) but loaded class is not a value class",
1103 class_name->as_C_string(), ik->name()->as_C_string());
1104 }
1105 } else {
1106 log_info(class, preload)("Preloading of class %s during linking of class %s "
1107 "(cause: LoadableDescriptors attribute) failed",
1108 class_name->as_C_string(), ik->name()->as_C_string());
1109 }
1110 }
1111 }
1112 }
1113
1114 void InstanceKlass::link_class(TRAPS) {
1115 assert(is_loaded(), "must be loaded");
1116 if (!is_linked()) {
1117 link_class_impl(CHECK);
1118 }
1119 }
1120
1121 // Called to verify that a class can link during initialization, without
1122 // throwing a VerifyError.
1123 bool InstanceKlass::link_class_or_fail(TRAPS) {
1124 assert(is_loaded(), "must be loaded");
1125 if (!is_linked()) {
1126 link_class_impl(CHECK_false);
1127 }
1128 return is_linked();
1129 }
1130
// Links this class: links the super class and local interfaces first, then
// (under the init lock) verifies, rewrites, links methods, and initializes
// the vtable/itable before publishing the 'linked' state. Returns false when
// verification fails or an exception is pending; true once linked.
bool InstanceKlass::link_class_impl(TRAPS) {
  if (CDSConfig::is_dumping_static_archive() && SystemDictionaryShared::has_class_failed_verification(this)) {
    // This is for CDS static dump only -- we use the in_error_state to indicate that
    // the class has failed verification. Throwing the NoClassDefFoundError here is just
    // a convenient way to stop repeat attempts to verify the same (bad) class.
    //
    // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
    // if we are executing Java code. This is not a problem for CDS dumping phase since
    // it doesn't execute any Java code.
    ResourceMark rm(THREAD);
    // Names are all known to be < 64k so we know this formatted message is not excessively large.
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_NoClassDefFoundError(),
                       "Class %s, or one of its supertypes, failed class initialization",
                       external_name());
    return false;
  }
  // return if already verified
  if (is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  JavaThread* jt = THREAD;

  // link super class before linking this class
  InstanceKlass* super_klass = super();
  if (super_klass != nullptr) {
    if (super_klass->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      // Names are all known to be < 64k so we know this formatted message is not excessively large.
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        external_name(),
        super_klass->external_name()
      );
      return false;
    }

    super_klass->link_class_impl(CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<InstanceKlass*>* interfaces = local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    InstanceKlass* interk = interfaces->at(index);
    interk->link_class_impl(CHECK_false);
  }

  if (Arguments::is_valhalla_enabled()) {
    // Aggressively preloading all classes from the LoadableDescriptors attribute
    // so inline classes can be scalarized in the calling conventions computed below
    load_classes_from_loadable_descriptors_attribute(this, THREAD);
    assert(!HAS_PENDING_EXCEPTION, "Shouldn't have pending exceptions from call above");
  }

  // in case the class is linked in the process of linking its superclasses
  if (is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    HandleMark hm(THREAD);
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE_false);
    // Don't allow preemption if we link/initialize classes below,
    // since that would release this monitor while we are in the
    // middle of linking this class.
    NoPreemptMark npm(THREAD);

    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    //

    if (!is_linked()) {
      if (!is_rewritten()) {
        if (in_aot_cache()) {
          assert(!verified_at_dump_time(), "must be");
        }
        {
          bool verify_ok = verify_code(THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (is_linked()) {
          return true;
        }

        // also sets rewritten
        rewrite_class(CHECK_false);
      } else if (in_aot_cache()) {
        SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      //
      // initialize_vtable and initialize_itable need to be rerun
      // for a shared class if
      // 1) the class is loaded by custom class loader or
      // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
      // 3) the class was not verified during dump time
      bool need_init_table = true;
      if (in_aot_cache() && verified_at_dump_time() &&
          SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
        need_init_table = false;
      }
      if (need_init_table) {
        vtable().initialize_vtable_and_check_constraints(CHECK_false);
        itable().initialize_itable_and_check_constraints(CHECK_false);
      }
#ifdef ASSERT
      vtable().verify(tty, true);
      // In case itable verification is ever added.
      // itable().verify(tty, true);
#endif
      if (Universe::is_fully_initialized()) {
        DeoptimizationScope deopt_scope;
        {
          // Now mark all code that assumes the class is not linked.
          // Set state under the Compile_lock also.
          MutexLocker ml(THREAD, Compile_lock);

          set_init_state(linked);
          CodeCache::mark_dependents_on(&deopt_scope, this);
        }
        // Perform the deopt handshake outside Compile_lock.
        deopt_scope.deoptimize_marked();
      } else {
        set_init_state(linked);
      }
      if (JvmtiExport::should_post_class_prepare()) {
        JvmtiExport::post_class_prepare(THREAD, this);
      }
    }
  }

  if (log_is_enabled(Info, class, link)) {
    ResourceMark rm(THREAD);
    log_info(class, link)("Linked class %s", external_name());
  }

  return true;
}
1300
1301 // Rewrite the byte codes of all of the methods of a class.
1302 // The rewriter must be called exactly once. Rewriting must happen after
1303 // verification but before the first method of the class is executed.
1304 void InstanceKlass::rewrite_class(TRAPS) {
1305 assert(is_loaded(), "must be loaded");
1306 if (is_rewritten()) {
1307 assert(in_aot_cache(), "rewriting an unshared class?");
1308 return;
1309 }
1310 Rewriter::rewrite(this, CHECK);
1311 set_rewritten();
1312 }
1313
1314 // Now relocate and link method entry points after class is rewritten.
1315 // This is outside is_rewritten flag. In case of an exception, it can be
1316 // executed more than once.
1317 void InstanceKlass::link_methods(TRAPS) {
1318 PerfTraceTime timer(ClassLoader::perf_ik_link_methods_time());
1319
1320 int len = methods()->length();
1321 for (int i = len-1; i >= 0; i--) {
1322 methodHandle m(THREAD, methods()->at(i));
1323
1324 // Set up method entry points for compiler and interpreter .
1325 m->link_method(m, CHECK);
1326 }
1327 }
1328
1329 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1330 void InstanceKlass::initialize_super_interfaces(TRAPS) {
1331 assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1332 for (int i = 0; i < local_interfaces()->length(); ++i) {
1333 InstanceKlass* ik = local_interfaces()->at(i);
1334
1335 // Initialization is depth first search ie. we start with top of the inheritance tree
1336 // has_nonstatic_concrete_methods drives searching superinterfaces since it
1337 // means has_nonstatic_concrete_methods in its superinterface hierarchy
1338 if (ik->has_nonstatic_concrete_methods()) {
1339 ik->initialize_super_interfaces(CHECK);
1340 }
1341
1342 // Only initialize() interfaces that "declare" concrete methods.
1343 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1344 ik->initialize(CHECK);
1345 }
1346 }
1347 }
1348
// Maps a class whose static initialization failed to the recorded Throwable
// (held alive via an OopHandle rooted in Universe::vm_global()). Accesses are
// guarded by ClassInitError_lock; dead-loader entries are pruned in
// clean_initialization_error_table().
using InitializationErrorTable = HashTable<const InstanceKlass*, OopHandle, 107, AnyObj::C_HEAP, mtClass>;
static InitializationErrorTable* _initialization_error_table;
1351
// Records the Throwable that caused this class's initialization to fail, so
// that later failures (Step 5 of initialize_impl) can attach the original
// cause to the NoClassDefFoundError they throw.
void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
  // Create the same exception with a message indicating the thread name,
  // and the StackTraceElements.
  Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
  ResourceMark rm(current);
  if (init_error.is_null()) {
    log_trace(class, init)("Unable to create the desired initialization error for class %s", external_name());

    // We failed to create the new exception, most likely due to either out-of-memory or
    // a stackoverflow error. If the original exception was either of those then we save
    // the shared, pre-allocated, stackless, instance of that exception.
    if (exception->klass() == vmClasses::StackOverflowError_klass()) {
      log_debug(class, init)("Using shared StackOverflowError as initialization error for class %s", external_name());
      init_error = Handle(current, Universe::class_init_stack_overflow_error());
    } else if (exception->klass() == vmClasses::OutOfMemoryError_klass()) {
      log_debug(class, init)("Using shared OutOfMemoryError as initialization error for class %s", external_name());
      init_error = Handle(current, Universe::class_init_out_of_memory_error());
    } else {
      // Nothing sensible to record for other exception kinds.
      return;
    }
  }

  MutexLocker ml(current, ClassInitError_lock);
  // Root the error strongly in vm_global storage so it survives GC until the
  // entry is released in clean_initialization_error_table().
  OopHandle elem = OopHandle(Universe::vm_global(), init_error());
  bool created;
  if (_initialization_error_table == nullptr) {
    _initialization_error_table = new (mtClass) InitializationErrorTable();
  }
  _initialization_error_table->put_if_absent(this, elem, &created);
  assert(created, "Initialization is single threaded");
  log_trace(class, init)("Initialization error added for class %s", external_name());
}
1384
1385 oop InstanceKlass::get_initialization_error(JavaThread* current) {
1386 MutexLocker ml(current, ClassInitError_lock);
1387 if (_initialization_error_table == nullptr) {
1388 return nullptr;
1389 }
1390 OopHandle* h = _initialization_error_table->get(this);
1391 return (h != nullptr) ? h->resolve() : nullptr;
1392 }
1393
1394 // Need to remove entries for unloaded classes.
1395 void InstanceKlass::clean_initialization_error_table() {
1396 struct InitErrorTableCleaner {
1397 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1398 if (!ik->is_loader_alive()) {
1399 h.release(Universe::vm_global());
1400 return true;
1401 } else {
1402 return false;
1403 }
1404 }
1405 };
1406
1407 assert_locked_or_safepoint(ClassInitError_lock);
1408 InitErrorTableCleaner cleaner;
1409 if (_initialization_error_table != nullptr) {
1410 _initialization_error_table->unlink(&cleaner);
1411 }
1412 }
1413
1414 class ThreadWaitingForClassInit : public StackObj {
1415 JavaThread* _thread;
1416 public:
1417 ThreadWaitingForClassInit(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
1418 _thread->set_class_to_be_initialized(ik);
1419 }
1420 ~ThreadWaitingForClassInit() {
1421 _thread->set_class_to_be_initialized(nullptr);
1422 }
1423 };
1424
// Runs the class initialization procedure (JVMS 5.5, "the 12 steps"):
// acquire the init lock, wait out or detect a concurrent/recursive
// initializer, run supers and <clinit>, then publish the final state.
void InstanceKlass::initialize_impl(TRAPS) {
  HandleMark hm(THREAD);

  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, -1);

  bool wait = false;

  JavaThread* jt = THREAD;

  bool debug_logging_enabled = log_is_enabled(Debug, class, init);

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE);

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw. This would wreak havoc. See 6320309.
    while (is_being_initialized() && !is_reentrant_initialization(jt)) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" waiting for initialization of %s by thread \"%s\"",
                               jt->name(), external_name(), init_thread_name());
      }
      wait = true;
      ThreadWaitingForClassInit twcl(THREAD, this);
      ol.wait_uninterruptibly(CHECK_PREEMPTABLE);
    }

    // Step 3
    if (is_being_initialized() && is_reentrant_initialization(jt)) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" recursively initializing %s",
                               jt->name(), external_name());
      }
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
      return;
    }

    // Step 4
    if (is_initialized()) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" found %s already initialized",
                               jt->name(), external_name());
      }
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
      return;
    }

    // Step 5
    if (is_in_error_state()) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" found %s is in error state",
                               jt->name(), external_name());
      }

      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
      ResourceMark rm(THREAD);
      Handle cause(THREAD, get_initialization_error(THREAD));

      stringStream ss;
      ss.print("Could not initialize class %s", external_name());
      if (cause.is_null()) {
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
      } else {
        THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
                        ss.as_string(), cause);
      }
    } else {

      // Step 6
      set_init_state(being_initialized);
      set_init_thread(jt);
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" is initializing %s",
                               jt->name(), external_name());
      }
    }
  }

  // Block preemption once we are the initializer thread. Unmounting now
  // would complicate the reentrant case (identity is platform thread).
  NoPreemptMark npm(THREAD);

  // Pre-allocating an all-zero value to be used to reset nullable flat storages
  if (is_inline_klass()) {
    InlineKlass* vk = InlineKlass::cast(this);
    if (vk->supports_nullable_layouts()) {
      oop val = vk->allocate_instance(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        // Allocation failed: record the error, move the class into the
        // initialization_error state, then rethrow the original exception.
        Handle e(THREAD, PENDING_EXCEPTION);
        CLEAR_PENDING_EXCEPTION;
        {
          EXCEPTION_MARK;
          add_initialization_error(THREAD, e);
          // Locks object, set state, and notify all waiting threads
          set_initialization_state_and_notify(initialization_error, THREAD);
          CLEAR_PENDING_EXCEPTION;
        }
        THROW_OOP(e());
      }
      vk->set_null_reset_value(val);
    }
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize its super class and super
  // interfaces.
  if (!is_interface()) {
    Klass* super_klass = super();
    if (super_klass != nullptr && super_klass->should_be_initialized()) {
      super_klass->initialize(THREAD);
    }
    // If C implements any interface that declares a non-static, concrete method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
    // having a superinterface that declares, non-static, concrete methods
    if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
      initialize_super_interfaces(THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        add_initialization_error(THREAD, e);
        // Locks object, set state, and notify all waiting threads
        set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
      THROW_OOP(e());
    }
  }


  // Step 8
  {
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
    if (class_initializer() != nullptr) {
      // Timer includes any side effects of class initialization (resolution,
      // etc), but not recursive entry into call_class_initializer().
      PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                               ClassLoader::perf_class_init_selftime(),
                               ClassLoader::perf_classes_inited(),
                               jt->get_thread_stat()->perf_recursion_counts_addr(),
                               jt->get_thread_stat()->perf_timers_addr(),
                               PerfClassTraceTime::CLASS_CLINIT);
      call_class_initializer(THREAD);
    } else {
      // The elapsed time is so small it's not worth counting.
      if (UsePerfData) {
        ClassLoader::perf_classes_inited()->inc();
      }
      call_class_initializer(THREAD);
    }

    if (has_strict_static_fields() && !HAS_PENDING_EXCEPTION) {
      // Step 9 also verifies that strict static fields have been initialized.
      // Status bits were set in ClassFileParser::post_process_parsed_stream.
      // After <clinit>, bits must all be clear, or else we must throw an error.
      // This is an extremely fast check, so we won't bother with a timer.
      assert(fields_status() != nullptr, "");
      Symbol* bad_strict_static = nullptr;
      for (int index = 0; index < fields_status()->length(); index++) {
        // Very fast loop over single byte array looking for a set bit.
        if (fields_status()->adr_at(index)->is_strict_static_unset()) {
          // This strict static field has not been set by the class initializer.
          // Note that in the common no-error case, we read no field metadata.
          // We only unpack it when we need to report an error.
          FieldInfo fi = field(index);
          bad_strict_static = fi.name(constants());
          if (debug_logging_enabled) {
            ResourceMark rm(jt);
            const char* msg = format_strict_static_message(bad_strict_static);
            log_debug(class, init)("%s", msg);
          } else {
            // If we are not logging, do not bother to look for a second offense.
            break;
          }
        }
      }
      if (bad_strict_static != nullptr) {
        throw_strict_static_exception(bad_strict_static, "is unset after initialization of", THREAD);
      }
    }
  }

  // Step 9
  if (!HAS_PENDING_EXCEPTION) {
    set_initialization_state_and_notify(fully_initialized, CHECK);
    DEBUG_ONLY(vtable().verify(tty, true);)
    CompilationPolicy::replay_training_at_init(this, THREAD);
  }
  else {
    // Step 10 and 11
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception(jt);
    {
      EXCEPTION_MARK;
      add_initialization_error(THREAD, e);
      set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception(jt);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
    if (e->is_a(vmClasses::Error_klass())) {
      THROW_OOP(e());
    } else {
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
}
1660
1661
// Transition _init_state to 'state' (fully_initialized or initialization_error)
// and wake any threads blocked on the init lock waiting for this class.
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  Handle h_init_lock(THREAD, init_lock());
  if (h_init_lock() != nullptr) {
    ObjectLocker ol(h_init_lock, THREAD);
    set_init_thread(nullptr); // reset _init_thread before changing _init_state
    set_init_state(state);
    // Clear the init lock so later calls find it null (see else branch below).
    fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    // init_lock() is null only after fence_and_clear_init_lock() has run,
    // i.e. a terminal state was already set; in debug builds this assert
    // therefore always fires (we are in the branch where the lock is null).
    assert(h_init_lock() != nullptr, "The initialization state should never be set twice");
    set_init_thread(nullptr); // reset _init_thread before changing _init_state
    set_init_state(state);
  }
}
1676
// Bookkeeping for an access to a strict static field while its status bits
// may still be set (during class initialization). Clears the "unset" bit on
// the first write, and throws IllegalStateException for illegal orderings:
// a read before any write, or (for finals) a write after a read.
void InstanceKlass::notify_strict_static_access(int field_index, bool is_writing, TRAPS) {
  guarantee(field_index >= 0 && field_index < fields_status()->length(), "valid field index");
  // Field metadata is unpacked here for assertion checking only; both the
  // DEBUG_ONLY declaration and the asserts compile away in product builds.
  DEBUG_ONLY(FieldInfo debugfi = field(field_index));
  assert(debugfi.access_flags().is_strict(), "");
  assert(debugfi.access_flags().is_static(), "");
  FieldStatus& fs = *fields_status()->adr_at(field_index);
  LogTarget(Trace, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm(THREAD);
    LogStream ls(lt);
    FieldInfo fi = field(field_index);
    ls.print("notify %s %s %s%s ",
             external_name(), is_writing? "Write" : "Read",
             fs.is_strict_static_unset() ? "Unset" : "(set)",
             fs.is_strict_static_unread() ? "+Unread" : "");
    fi.print(&ls, constants());
  }
  if (fs.is_strict_static_unset()) {
    assert(fs.is_strict_static_unread(), "ClassFileParser resp.");
    // If it is not set, there are only two reasonable things we can do here:
    // - mark it set if this is putstatic
    // - throw an error (Read-Before-Write) if this is getstatic

    // The unset state is (or should be) transient, and observable only in one
    // thread during the execution of <clinit>. Something is wrong here as this
    // should not be possible
    guarantee(is_reentrant_initialization(THREAD), "unscoped access to strict static");
    if (is_writing) {
      // clear the "unset" bit, since the field is actually going to be written
      fs.update_strict_static_unset(false);
    } else {
      // throw an IllegalStateException, since we are reading before writing
      // see also InstanceKlass::initialize_impl, Step 8 (at end)
      Symbol* bad_strict_static = field(field_index).name(constants());
      throw_strict_static_exception(bad_strict_static, "is unset before first read in", CHECK);
    }
  } else {
    // Ensure no write after read for final strict statics
    FieldInfo fi = field(field_index);
    bool is_final = fi.access_flags().is_final();
    if (is_final) {
      // no final write after read, so observing a constant freezes it, as if <clinit> ended early
      // (maybe we could trust the constant a little earlier, before <clinit> ends)
      if (is_writing && !fs.is_strict_static_unread()) {
        Symbol* bad_strict_static = fi.name(constants());
        throw_strict_static_exception(bad_strict_static, "is set after read (as final) in", CHECK);
      } else if (!is_writing && fs.is_strict_static_unread()) {
        // First read of a set final strict static: clear the "unread" bit.
        fs.update_strict_static_unread(false);
      }
    }
  }
}
1729
// Throw IllegalStateException for an illegal strict static field access.
// 'when' is the verb phrase for the message (e.g. "is unset before first read in");
// the full message also names the field and this class.
void InstanceKlass::throw_strict_static_exception(Symbol* field_name, const char* when, TRAPS) {
  ResourceMark rm(THREAD);
  const char* msg = format_strict_static_message(field_name, when);
  THROW_MSG(vmSymbols::java_lang_IllegalStateException(), msg);
}
1735
1736 const char* InstanceKlass::format_strict_static_message(Symbol* field_name, const char* when) {
1737 stringStream ss;
1738 ss.print("Strict static \"%s\" %s %s",
1739 field_name->as_C_string(),
1740 when == nullptr ? "is unset in" : when,
1741 external_name());
1742 return ss.as_string();
1743 }
1744
1745 // Update hierarchy. This is done before the new klass has been added to the SystemDictionary. The Compile_lock
1746 // is grabbed, to ensure that the compiler is not using the class hierarchy.
// Link this klass into the class hierarchy (sibling list and interface
// implementor lists) under Compile_lock, then deoptimize any compiled code
// that depended on the old hierarchy. Called before the new klass is added
// to the SystemDictionary.
void InstanceKlass::add_to_hierarchy(JavaThread* current) {
  assert(!SafepointSynchronize::is_at_safepoint(), "must NOT be at safepoint");

  DeoptimizationScope deopt_scope;
  {
    MutexLocker ml(current, Compile_lock);

    set_init_state(InstanceKlass::loaded);
    // make sure init_state store is already done.
    // The compiler reads the hierarchy outside of the Compile_lock.
    // Access ordering is used to add to hierarchy.

    // Link into hierarchy.
    append_to_sibling_list();                    // add to superklass/sibling list
    process_interfaces();                        // handle all "implements" declarations

    // Now mark all code that depended on old class hierarchy.
    // Note: must be done *after* linking k into the hierarchy (was bug 12/9/97)
    if (Universe::is_fully_initialized()) {
      CodeCache::mark_dependents_on(&deopt_scope, this);
    }
  }
  // Perform the deopt handshake outside Compile_lock.
  deopt_scope.deoptimize_marked();
}
1772
1773
1774 InstanceKlass* InstanceKlass::implementor() const {
1775 InstanceKlass* volatile* ik = adr_implementor();
1776 if (ik == nullptr) {
1777 return nullptr;
1778 } else {
1779 // This load races with inserts, and therefore needs acquire.
1780 InstanceKlass* ikls = AtomicAccess::load_acquire(ik);
1781 if (ikls != nullptr && !ikls->is_loader_alive()) {
1782 return nullptr; // don't return unloaded class
1783 } else {
1784 return ikls;
1785 }
1786 }
1787 }
1788
1789
1790 void InstanceKlass::set_implementor(InstanceKlass* ik) {
1791 assert_locked_or_safepoint(Compile_lock);
1792 assert(is_interface(), "not interface");
1793 InstanceKlass* volatile* addr = adr_implementor();
1794 assert(addr != nullptr, "null addr");
1795 if (addr != nullptr) {
1796 AtomicAccess::release_store(addr, ik);
1797 }
1798 }
1799
1800 int InstanceKlass::nof_implementors() const {
1801 InstanceKlass* ik = implementor();
1802 if (ik == nullptr) {
1803 return 0;
1804 } else if (ik != this) {
1805 return 1;
1806 } else {
1807 return 2;
1808 }
1809 }
1810
1811 // The embedded _implementor field can only record one implementor.
1812 // When there are more than one implementors, the _implementor field
1813 // is set to the interface Klass* itself. Following are the possible
1814 // values for the _implementor field:
1815 // null - no implementor
1816 // implementor Klass* - one implementor
1817 // self - more than one implementor
1818 //
1819 // The _implementor field only exists for interfaces.
1820 void InstanceKlass::add_implementor(InstanceKlass* ik) {
1821 if (Universe::is_fully_initialized()) {
1822 assert_lock_strong(Compile_lock);
1823 }
1824 assert(is_interface(), "not interface");
1825 // Filter out my subinterfaces.
1826 // (Note: Interfaces are never on the subklass list.)
1827 if (ik->is_interface()) return;
1828
1829 // Filter out subclasses whose supers already implement me.
1830 // (Note: CHA must walk subclasses of direct implementors
1831 // in order to locate indirect implementors.)
1832 InstanceKlass* super_ik = ik->super();
1833 if (super_ik != nullptr && super_ik->implements_interface(this))
1834 // We only need to check one immediate superclass, since the
1835 // implements_interface query looks at transitive_interfaces.
1836 // Any supers of the super have the same (or fewer) transitive_interfaces.
1837 return;
1838
1839 InstanceKlass* iklass = implementor();
1840 if (iklass == nullptr) {
1841 set_implementor(ik);
1842 } else if (iklass != this && iklass != ik) {
1843 // There is already an implementor. Use itself as an indicator of
1844 // more than one implementors.
1845 set_implementor(this);
1846 }
1847
1848 // The implementor also implements the transitive_interfaces
1849 for (int index = 0; index < local_interfaces()->length(); index++) {
1850 local_interfaces()->at(index)->add_implementor(ik);
1851 }
1852 }
1853
1854 void InstanceKlass::init_implementor() {
1855 if (is_interface()) {
1856 set_implementor(nullptr);
1857 }
1858 }
1859
1860
1861 void InstanceKlass::process_interfaces() {
1862 // link this class into the implementors list of every interface it implements
1863 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1864 assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1865 InstanceKlass* interf = local_interfaces()->at(i);
1866 assert(interf->is_interface(), "expected interface");
1867 interf->add_implementor(this);
1868 }
1869 }
1870
1871 bool InstanceKlass::can_be_primary_super_slow() const {
1872 if (is_interface())
1873 return false;
1874 else
1875 return Klass::can_be_primary_super_slow();
1876 }
1877
// Compute the secondary supers (implemented interfaces) for this klass.
// Returns null when _secondary_supers could be set directly (shared empty
// array, or the transitive interface list itself); otherwise returns a
// growable array for the caller to finish constructing with extra slots.
GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
                                                               Array<InstanceKlass*>* transitive_interfaces) {
  // The secondaries are the implemented interfaces.
  // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
  // (but it's safe to do here because we won't write into _secondary_supers from this point on).
  Array<Klass*>* interfaces = (Array<Klass*>*)(address)transitive_interfaces;
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array(), Universe::the_empty_klass_bitmap());
    return nullptr;
  } else if (num_extra_slots == 0 && interfaces->length() <= 1) {
    // We will reuse the transitive interfaces list if we're certain
    // it's in hash order.
    uintx bitmap = compute_secondary_supers_bitmap(interfaces);
    set_secondary_supers(interfaces, bitmap);
    return nullptr;
  }
  // Copy transitive interfaces to a temporary growable array to be constructed
  // into the secondary super list with extra slots.
  GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
  for (int i = 0; i < interfaces->length(); i++) {
    secondaries->push(interfaces->at(i));
  }
  return secondaries;
}
1904
1905 bool InstanceKlass::implements_interface(Klass* k) const {
1906 if (this == k) return true;
1907 assert(k->is_interface(), "should be an interface class");
1908 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1909 if (transitive_interfaces()->at(i) == k) {
1910 return true;
1911 }
1912 }
1913 return false;
1914 }
1915
1916 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1917 // Verify direct super interface
1918 if (this == k) return true;
1919 assert(k->is_interface(), "should be an interface class");
1920 for (int i = 0; i < local_interfaces()->length(); i++) {
1921 if (local_interfaces()->at(i) == k) {
1922 return true;
1923 }
1924 }
1925 return false;
1926 }
1927
// Register instance i for finalization by invoking the method recorded in
// Universe::finalizer_register_method(). Returns the instance (re-read from
// the handle, since the Java call may have triggered a GC).
instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" PTR_FORMAT ") as finalizable", p2i(i));
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh(THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)
  return h_i();
}
1943
// Allocate a new instance of this (concrete, non-interface) class on the
// Java heap. May safepoint/GC; may throw OutOfMemoryError.
instanceOop InstanceKlass::allocate_instance(TRAPS) {
  assert(!is_abstract() && !is_interface(), "Should not create this object");
  size_t size = size_helper();  // Query before forming handle.
  return (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
}
1949
// Reflective allocation path: 'java_class' is a java.lang.Class mirror.
// Throws InstantiationException when the mirror has no Klass (presumably a
// primitive mirror — confirm against as_Klass), validates instantiability,
// ensures the class is initialized, then allocates.
instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
  Klass* k = java_lang_Class::as_Klass(java_class);
  if (k == nullptr) {
    ResourceMark rm(THREAD);
    THROW_(vmSymbols::java_lang_InstantiationException(), nullptr);
  }
  InstanceKlass* ik = cast(k);
  ik->check_valid_for_instantiation(false, CHECK_NULL);
  ik->initialize(CHECK_NULL);
  return ik->allocate_instance(THREAD);
}
1961
// As allocate_instance(TRAPS), but return the new instance wrapped in a Handle.
instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
  return instanceHandle(THREAD, allocate_instance(THREAD));
}
1965
1966 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1967 if (is_interface() || is_abstract()) {
1968 ResourceMark rm(THREAD);
1969 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1970 : vmSymbols::java_lang_InstantiationException(), external_name());
1971 }
1972 if (this == vmClasses::Class_klass()) {
1973 ResourceMark rm(THREAD);
1974 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1975 : vmSymbols::java_lang_IllegalAccessException(), external_name());
1976 }
1977 }
1978
// Return the n-dimensional array klass with this class as element type,
// lazily creating and caching the one-dimensional array klass on first use.
// Creation is guarded by MultiArray_lock with a double-check after acquire.
ArrayKlass* InstanceKlass::array_klass(int n, TRAPS) {
  // Need load-acquire for lock-free read
  if (array_klasses_acquire() == nullptr) {

    // Recursively lock array allocation
    RecursiveLocker rl(MultiArray_lock, THREAD);

    // Check if another thread created the array klass while we were waiting for the lock.
    if (array_klasses() == nullptr) {
      ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
      // use 'release' to pair with lock-free load
      release_set_array_klasses(k);
    }
  }

  // array_klasses() will always be set at this point
  ArrayKlass* ak = array_klasses();
  assert(ak != nullptr, "should be set");
  return ak->array_klass(n, THREAD);
}
1999
2000 ArrayKlass* InstanceKlass::array_klass_or_null(int n) {
2001 // Need load-acquire for lock-free read
2002 ArrayKlass* ak = array_klasses_acquire();
2003 if (ak == nullptr) {
2004 return nullptr;
2005 } else {
2006 return ak->array_klass_or_null(n);
2007 }
2008 }
2009
// One-dimensional convenience overload; may allocate (see array_klass(int, TRAPS)).
ArrayKlass* InstanceKlass::array_klass(TRAPS) {
  return array_klass(1, THREAD);
}
2013
// One-dimensional convenience overload; never allocates.
ArrayKlass* InstanceKlass::array_klass_or_null() {
  return array_klass_or_null(1);
}
2017
2018 Method* InstanceKlass::class_initializer() const {
2019 Method* clinit = find_method(
2020 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
2021 if (clinit != nullptr && clinit->is_class_initializer()) {
2022 return clinit;
2023 }
2024 return nullptr;
2025 }
2026
// Run this class's <clinit>, if any. Short-circuits for compile-replay
// suppression and for CDS fast paths (AOT-initialized mirrors and archived
// enum objects) before making the actual Java call.
void InstanceKlass::call_class_initializer(TRAPS) {
  if (ReplayCompiles &&
      (ReplaySuppressInitializers == 1 ||
       (ReplaySuppressInitializers >= 2 && class_loader() != nullptr))) {
    // Hide the existence of the initializer for the purpose of replaying the compile
    return;
  }

#if INCLUDE_CDS
  // This is needed to ensure the consistency of the archived heap objects.
  if (has_aot_initialized_mirror() && CDSConfig::is_loading_heap()) {
    AOTClassInitializer::call_runtime_setup(THREAD, this);
    return;
  } else if (has_archived_enum_objs()) {
    assert(in_aot_cache(), "must be");
    bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK);
    if (initialized) {
      return;
    }
  }
#endif

  methodHandle h_method(THREAD, class_initializer());
  assert(!is_initialized(), "we cannot initialize twice");
  LogTarget(Info, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm(THREAD);
    LogStream ls(lt);
    ls.print("%d Initializing ", call_class_initializer_counter++);
    name()->print_value_on(&ls);
    ls.print_cr("%s (" PTR_FORMAT ") by thread \"%s\"",
                h_method() == nullptr ? "(no method)" : "", p2i(this),
                THREAD->name());
  }
  if (h_method() != nullptr) {
    ThreadInClassInitializer ticl(THREAD, this); // Track class being initialized
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}
2068
2069 // If a class that implements this interface is initialized, is the JVM required
2070 // to first execute a <clinit> method declared in this interface,
2071 // or (if also_check_supers==true) any of the super types of this interface?
2072 //
2073 // JVMS 5.5. Initialization, step 7: Next, if C is a class rather than
2074 // an interface, then let SC be its superclass and let SI1, ..., SIn
2075 // be all superinterfaces of C (whether direct or indirect) that
2076 // declare at least one non-abstract, non-static method.
2077 //
2078 // So when an interface is initialized, it does not look at its
2079 // supers. But a proper class will ensure that all of its supers have
2080 // run their <clinit> methods, except that it disregards interfaces
2081 // that lack a non-static concrete method (i.e., a default method).
2082 // Therefore, you should probably call this method only when the
2083 // current class is a super of some proper class, not an interface.
2084 bool InstanceKlass::interface_needs_clinit_execution_as_super(bool also_check_supers) const {
2085 assert(is_interface(), "must be");
2086
2087 if (!has_nonstatic_concrete_methods()) {
2088 // quick check: no nonstatic concrete methods are declared by this or any super interfaces
2089 return false;
2090 }
2091
2092 // JVMS 5.5. Initialization
2093 // ...If C is an interface that declares a non-abstract,
2094 // non-static method, the initialization of a class that
2095 // implements C directly or indirectly.
2096 if (declares_nonstatic_concrete_methods() && class_initializer() != nullptr) {
2097 return true;
2098 }
2099 if (also_check_supers) {
2100 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2101 for (int i = 0; i < all_ifs->length(); ++i) {
2102 InstanceKlass* super_intf = all_ifs->at(i);
2103 if (super_intf->declares_nonstatic_concrete_methods() && super_intf->class_initializer() != nullptr) {
2104 return true;
2105 }
2106 }
2107 }
2108 return false;
2109 }
2110
// Fill *entry_for with the interpreter oop map for (method, bci).
// The backing OopMapCache is created lazily and published with a CAS:
// the first thread to install wins, losers delete their local copy.
void InstanceKlass::mask_for(const methodHandle& method, int bci,
                             InterpreterOopMap* entry_for) {
  // Lazily create the _oop_map_cache at first request.
  // Load_acquire is needed to safely get instance published with CAS by another thread.
  OopMapCache* oop_map_cache = AtomicAccess::load_acquire(&_oop_map_cache);
  if (oop_map_cache == nullptr) {
    // Try to install new instance atomically.
    oop_map_cache = new OopMapCache();
    OopMapCache* other = AtomicAccess::cmpxchg(&_oop_map_cache, (OopMapCache*)nullptr, oop_map_cache);
    if (other != nullptr) {
      // Someone else managed to install before us, ditch local copy and use the existing one.
      delete oop_map_cache;
      oop_map_cache = other;
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  oop_map_cache->lookup(method, bci, entry_for);
}
2129
2130
// Return the FieldInfo with the given field index, via a linear scan over
// all fields. An index with no matching field is a fatal error.
FieldInfo InstanceKlass::field(int index) const {
  for (AllFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.index() == index) {
      return fs.to_FieldInfo();
    }
  }
  fatal("Field not found");
  return FieldInfo(); // unreachable; keeps the compiler happy
}
2140
2141 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2142 JavaFieldStream fs(this);
2143 if (fs.lookup(name, sig)) {
2144 assert(fs.name() == name, "name must match");
2145 assert(fs.signature() == sig, "signature must match");
2146 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2147 return true;
2148 }
2149 return false;
2150 }
2151
2152
2153 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2154 const int n = local_interfaces()->length();
2155 for (int i = 0; i < n; i++) {
2156 InstanceKlass* intf1 = local_interfaces()->at(i);
2157 assert(intf1->is_interface(), "just checking type");
2158 // search for field in current interface
2159 if (intf1->find_local_field(name, sig, fd)) {
2160 assert(fd->is_static(), "interface field must be static");
2161 return intf1;
2162 }
2163 // search for field in direct superinterfaces
2164 Klass* intf2 = intf1->find_interface_field(name, sig, fd);
2165 if (intf2 != nullptr) return intf2;
2166 }
2167 // otherwise field lookup fails
2168 return nullptr;
2169 }
2170
2171
2172 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2173 // search order according to newest JVM spec (5.4.3.2, p.167).
2174 // 1) search for field in current klass
2175 if (find_local_field(name, sig, fd)) {
2176 return const_cast<InstanceKlass*>(this);
2177 }
2178 // 2) search for field recursively in direct superinterfaces
2179 { Klass* intf = find_interface_field(name, sig, fd);
2180 if (intf != nullptr) return intf;
2181 }
2182 // 3) apply field lookup recursively if superclass exists
2183 { InstanceKlass* supr = super();
2184 if (supr != nullptr) return supr->find_field(name, sig, fd);
2185 }
2186 // 4) otherwise field lookup fails
2187 return nullptr;
2188 }
2189
2190
2191 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
2192 // search order according to newest JVM spec (5.4.3.2, p.167).
2193 // 1) search for field in current klass
2194 if (find_local_field(name, sig, fd)) {
2195 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
2196 }
2197 // 2) search for field recursively in direct superinterfaces
2198 if (is_static) {
2199 Klass* intf = find_interface_field(name, sig, fd);
2200 if (intf != nullptr) return intf;
2201 }
2202 // 3) apply field lookup recursively if superclass exists
2203 { InstanceKlass* supr = super();
2204 if (supr != nullptr) return supr->find_field(name, sig, is_static, fd);
2205 }
2206 // 4) otherwise field lookup fails
2207 return nullptr;
2208 }
2209
2210 bool InstanceKlass::contains_field_offset(int offset) {
2211 if (this->is_inline_klass()) {
2212 InlineKlass* vk = InlineKlass::cast(this);
2213 return offset >= vk->payload_offset() && offset < (vk->payload_offset() + vk->payload_size_in_bytes());
2214 } else {
2215 fieldDescriptor fd;
2216 return find_field_from_offset(offset, false, &fd);
2217 }
2218 }
2219
2220 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
2221 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2222 if (fs.offset() == offset) {
2223 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2224 if (fd->is_static() == is_static) return true;
2225 }
2226 }
2227 return false;
2228 }
2229
2230
2231 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
2232 const InstanceKlass* klass = this;
2233 while (klass != nullptr) {
2234 if (klass->find_local_field_from_offset(offset, is_static, fd)) {
2235 return true;
2236 }
2237 klass = klass->super();
2238 }
2239 return false;
2240 }
2241
// If a locally declared flat (inlined) field's byte range covers 'offset',
// fill in *fd with that field and return true.
bool InstanceKlass::find_local_flat_field_containing_offset(int offset, fieldDescriptor* fd) const {
  for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
    if (!fs.is_flat()) {
      continue;
    }

    if (fs.offset() > offset) {
      // Field starts after the target offset; it cannot contain it.
      continue;
    }

    // Candidate: check whether the offset falls within the field's extent.
    const int offset_in_flat_field = offset - fs.offset();
    const InlineLayoutInfo layout_info = inline_layout_info(fs.index());
    const int field_size = layout_info.klass()->layout_size_in_bytes(layout_info.kind());

    assert(LayoutKindHelper::is_flat(layout_info.kind()), "Must be flat");

    if (offset_in_flat_field < field_size) {
      fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
      assert(!fd->is_static(), "Static fields are not flattened");

      return true;
    }
  }

  return false;
}
2268
2269 bool InstanceKlass::find_flat_field_containing_offset(int offset, fieldDescriptor* fd) const {
2270 const InstanceKlass* klass = this;
2271 while (klass != nullptr) {
2272 if (klass->find_local_flat_field_containing_offset(offset, fd)) {
2273 return true;
2274 }
2275
2276 klass = klass->super();
2277 }
2278
2279 return false;
2280 }
2281
2282 void InstanceKlass::methods_do(void f(Method* method)) {
2283 // Methods aren't stable until they are loaded. This can be read outside
2284 // a lock through the ClassLoaderData for profiling
2285 // Redefined scratch classes are on the list and need to be cleaned
2286 if (!is_loaded() && !is_scratch_class()) {
2287 return;
2288 }
2289
2290 int len = methods()->length();
2291 for (int index = 0; index < len; index++) {
2292 Method* m = methods()->at(index);
2293 assert(m->is_method(), "must be method");
2294 f(m);
2295 }
2296 }
2297
2298
2299 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
2300 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
2301 if (fs.access_flags().is_static()) {
2302 fieldDescriptor& fd = fs.field_descriptor();
2303 cl->do_field(&fd);
2304 }
2305 }
2306 }
2307
2308
// Apply f(&fd, mirror, THREAD) to each locally declared static field.
// Stops and propagates immediately if f raises a pending exception (CHECK).
void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
  for (AllFieldStream fs(this); !fs.done(); fs.next()) {
    if (fs.access_flags().is_static()) {
      fieldDescriptor& fd = fs.field_descriptor();
      f(&fd, mirror, CHECK);
    }
  }
}
2317
2318 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
2319 InstanceKlass* super = this->super();
2320 if (super != nullptr) {
2321 super->do_nonstatic_fields(cl);
2322 }
2323 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2324 fieldDescriptor& fd = fs.field_descriptor();
2325 if (!fd.is_static()) {
2326 cl->do_field(&fd);
2327 }
2328 }
2329 }
2330
// qsort-style comparator: orders fields by ascending offset.
static int compare_fields_by_offset(FieldInfo* a, FieldInfo* b) {
  return a->offset() - b->offset();
}
2334
// Apply cl to every nonstatic field (superclass fields first), with this
// class's own fields presented in ascending offset order.
void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) {
  InstanceKlass* super = this->super();
  if (super != nullptr) {
    super->print_nonstatic_fields(cl);
  }
  ResourceMark rm;
  // In DebugInfo nonstatic fields are sorted by offset.
  GrowableArray<FieldInfo> fields_sorted;
  for (AllFieldStream fs(this); !fs.done(); fs.next()) {
    if (!fs.access_flags().is_static()) {
      fields_sorted.push(fs.to_FieldInfo());
    }
  }
  int length = fields_sorted.length();
  if (length > 0) {
    fields_sorted.sort(compare_fields_by_offset);
    fieldDescriptor fd;
    for (int i = 0; i < length; i++) {
      fd.reinitialize(this, fields_sorted.at(i));
      assert(!fd.is_static() && fd.offset() == checked_cast<int>(fields_sorted.at(i).offset()), "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
}
2359
#ifdef ASSERT
// Debug-only linear method lookup by name and signature; returns the
// index of the first match, or -1.
static int linear_search(const Array<Method*>* methods,
                         const Symbol* name,
                         const Symbol* signature) {
  const int count = methods->length();
  for (int i = 0; i < count; i++) {
    const Method* const m = methods->at(i);
    assert(m->is_method(), "must be method");
    if (m->name() == name && m->signature() == signature) {
      return i;
    }
  }
  return -1;
}
#endif
2375
// Set during dynamic CDS archive dumping, when the methods array may no
// longer be sorted by name address (see quick_search).
bool InstanceKlass::_disable_method_binary_search = false;
2377
2378 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
2379 int len = methods->length();
2380 int l = 0;
2381 int h = len - 1;
2382 while (l <= h) {
2383 Method* m = methods->at(l);
2384 if (m->name() == name) {
2385 return l;
2386 }
2387 l++;
2388 }
2389 return -1;
2390 }
2391
2392 inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
2393 if (_disable_method_binary_search) {
2394 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
2395 // At the final stage of dynamic dumping, the methods array may not be sorted
2396 // by ascending addresses of their names, so we can't use binary search anymore.
2397 // However, methods with the same name are still laid out consecutively inside the
2398 // methods array, so let's look for the first one that matches.
2399 return linear_search(methods, name);
2400 }
2401
2402 int len = methods->length();
2403 int l = 0;
2404 int h = len - 1;
2405
2406 // methods are sorted by ascending addresses of their names, so do binary search
2407 while (l <= h) {
2408 int mid = (l + h) >> 1;
2409 Method* m = methods->at(mid);
2410 assert(m->is_method(), "must be method");
2411 int res = m->name()->fast_compare(name);
2412 if (res == 0) {
2413 return mid;
2414 } else if (res < 0) {
2415 l = mid + 1;
2416 } else {
2417 h = mid - 1;
2418 }
2419 }
2420 return -1;
2421 }
2422
// find_method looks up the name/signature in the local methods array.
// Default modes: overpass, static, and private methods are all candidates.
Method* InstanceKlass::find_method(const Symbol* name,
                                   const Symbol* signature) const {
  return find_method_impl(name, signature,
                          OverpassLookupMode::find,
                          StaticLookupMode::find,
                          PrivateLookupMode::find);
}
2431
// Member flavor of find_method_impl: forwards to the static implementation
// over this klass's local methods array.
Method* InstanceKlass::find_method_impl(const Symbol* name,
                                        const Symbol* signature,
                                        OverpassLookupMode overpass_mode,
                                        StaticLookupMode static_mode,
                                        PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(),
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}
2444
// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods
Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
                                            const Symbol* name,
                                            const Symbol* signature,
                                            PrivateLookupMode private_mode) {
  Method* const meth = InstanceKlass::find_method_impl(methods,
                                                       name,
                                                       signature,
                                                       OverpassLookupMode::find,
                                                       StaticLookupMode::skip,
                                                       private_mode);
  assert(((meth == nullptr) || !meth->is_static()),
         "find_instance_method should have skipped statics");
  return meth;
}
2461
// find_instance_method looks up the name/signature in the local methods array
// and skips over static methods (member flavor of the overload above).
Method* InstanceKlass::find_instance_method(const Symbol* name,
                                            const Symbol* signature,
                                            PrivateLookupMode private_mode) const {
  return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
}
2469
// Find looks up the name/signature in the local methods array
// and filters on the overpass, static and private flags.
// This returns the first one found.
// Note that the local methods array can have up to one overpass, one static
// and one instance (private or not) with the same name/signature.
Method* InstanceKlass::find_local_method(const Symbol* name,
                                         const Symbol* signature,
                                         OverpassLookupMode overpass_mode,
                                         StaticLookupMode static_mode,
                                         PrivateLookupMode private_mode) const {
  return InstanceKlass::find_method_impl(methods(),
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}
2487
// Find looks up the name/signature in the local methods array
// and filters on the overpass, static and private flags.
// This returns the first one found.
// Note that the local methods array can have up to one overpass, one static
// and one instance (private or not) with the same name/signature.
// (Static flavor of the member overload above.)
Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
                                         const Symbol* name,
                                         const Symbol* signature,
                                         OverpassLookupMode overpass_mode,
                                         StaticLookupMode static_mode,
                                         PrivateLookupMode private_mode) {
  return InstanceKlass::find_method_impl(methods,
                                         name,
                                         signature,
                                         overpass_mode,
                                         static_mode,
                                         private_mode);
}
2506
2507 Method* InstanceKlass::find_method(const Array<Method*>* methods,
2508 const Symbol* name,
2509 const Symbol* signature) {
2510 return InstanceKlass::find_method_impl(methods,
2511 name,
2512 signature,
2513 OverpassLookupMode::find,
2514 StaticLookupMode::find,
2515 PrivateLookupMode::find);
2516 }
2517
2518 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
2519 const Symbol* name,
2520 const Symbol* signature,
2521 OverpassLookupMode overpass_mode,
2522 StaticLookupMode static_mode,
2523 PrivateLookupMode private_mode) {
2524 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
2525 return hit >= 0 ? methods->at(hit): nullptr;
2526 }
2527
2528 // true if method matches signature and conforms to skipping_X conditions.
2529 static bool method_matches(const Method* m,
2530 const Symbol* signature,
2531 bool skipping_overpass,
2532 bool skipping_static,
2533 bool skipping_private) {
2534 return ((m->signature() == signature) &&
2535 (!skipping_overpass || !m->is_overpass()) &&
2536 (!skipping_static || !m->is_static()) &&
2537 (!skipping_private || !m->is_private()));
2538 }
2539
// Used directly for default_methods to find the index into the
// default_vtable_indices, and indirectly by find_method
// find_method_index looks in the local methods array to return the index
// of the matching name/signature. If, overpass methods are being ignored,
// the search continues to find a potential non-overpass match. This capability
// is important during method resolution to prefer a static method, for example,
// over an overpass method.
// There is the possibility in any _method's array to have the same name/signature
// for a static method, an overpass method and a local instance method
// To correctly catch a given method, the search criteria may need
// to explicitly skip the other two. For local instance methods, it
// is often necessary to skip private methods
int InstanceKlass::find_method_index(const Array<Method*>* methods,
                                     const Symbol* name,
                                     const Symbol* signature,
                                     OverpassLookupMode overpass_mode,
                                     StaticLookupMode static_mode,
                                     PrivateLookupMode private_mode) {
  const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
  const bool skipping_static = (static_mode == StaticLookupMode::skip);
  const bool skipping_private = (private_mode == PrivateLookupMode::skip);
  // Binary search by name only; methods sharing a name form a contiguous run,
  // so a matching signature (if any) is adjacent to 'hit'.
  const int hit = quick_search(methods, name);
  if (hit != -1) {
    const Method* const m = methods->at(hit);

    // Do linear search to find matching signature. First, quick check
    // for common case, ignoring overpasses if requested.
    if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
      return hit;
    }

    // search downwards through overloaded methods
    int i;
    for (i = hit - 1; i >= 0; --i) {
      const Method* const m = methods->at(i);
      assert(m->is_method(), "must be method");
      if (m->name() != name) {
        break; // left the run of same-named methods
      }
      if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
        return i;
      }
    }
    // search upwards
    for (i = hit + 1; i < methods->length(); ++i) {
      const Method* const m = methods->at(i);
      assert(m->is_method(), "must be method");
      if (m->name() != name) {
        break;
      }
      if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
        return i;
      }
    }
    // not found
#ifdef ASSERT
    // Cross-check: when no filtering was requested, an exhaustive linear
    // search must agree that the entry is absent.
    const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
      linear_search(methods, name, signature);
    assert(-1 == index, "binary search should have found entry %d", index);
#endif
  }
  return -1;
}
2603
2604 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
2605 return find_method_by_name(methods(), name, end);
2606 }
2607
2608 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
2609 const Symbol* name,
2610 int* end_ptr) {
2611 assert(end_ptr != nullptr, "just checking");
2612 int start = quick_search(methods, name);
2613 int end = start + 1;
2614 if (start != -1) {
2615 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
2616 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
2617 *end_ptr = end;
2618 return start;
2619 }
2620 return -1;
2621 }
2622
// uncached_lookup_method searches both the local class methods array and all
// superclasses methods arrays, skipping any overpass methods in superclasses,
// and possibly skipping private methods.
Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
                                              const Symbol* signature,
                                              OverpassLookupMode overpass_mode,
                                              PrivateLookupMode private_mode) const {
  // The caller-supplied overpass mode applies only to 'this'; for every
  // superclass the loop tail forces OverpassLookupMode::skip.
  OverpassLookupMode overpass_local_mode = overpass_mode;
  const InstanceKlass* klass = this;
  while (klass != nullptr) {
    Method* const method = klass->find_method_impl(name,
                                                   signature,
                                                   overpass_local_mode,
                                                   StaticLookupMode::find,
                                                   private_mode);
    if (method != nullptr) {
      return method;
    }
    if (name == vmSymbols::object_initializer_name()) {
      break; // <init> is never inherited
    }
    klass = klass->super();
    overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
  }
  return nullptr;
}
2649
2650 #ifdef ASSERT
2651 // search through class hierarchy and return true if this class or
2652 // one of the superclasses was redefined
2653 bool InstanceKlass::has_redefined_this_or_super() const {
2654 const InstanceKlass* klass = this;
2655 while (klass != nullptr) {
2656 if (klass->has_been_redefined()) {
2657 return true;
2658 }
2659 klass = klass->super();
2660 }
2661 return false;
2662 }
2663 #endif
2664
2665 // lookup a method in the default methods list then in all transitive interfaces
2666 // Do NOT return private or static methods
2667 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2668 Symbol* signature) const {
2669 Method* m = nullptr;
2670 if (default_methods() != nullptr) {
2671 m = find_method(default_methods(), name, signature);
2672 }
2673 // Look up interfaces
2674 if (m == nullptr) {
2675 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2676 }
2677 return m;
2678 }
2679
2680 // lookup a method in all the interfaces that this class implements
2681 // Do NOT return private or static methods, new in JDK8 which are not externally visible
2682 // They should only be found in the initial InterfaceMethodRef
2683 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2684 Symbol* signature,
2685 DefaultsLookupMode defaults_mode) const {
2686 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2687 int num_ifs = all_ifs->length();
2688 InstanceKlass *ik = nullptr;
2689 for (int i = 0; i < num_ifs; i++) {
2690 ik = all_ifs->at(i);
2691 Method* m = ik->lookup_method(name, signature);
2692 if (m != nullptr && m->is_public() && !m->is_static() &&
2693 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2694 return m;
2695 }
2696 }
2697 return nullptr;
2698 }
2699
// Emit the column headers once at construction; do_klass() prints one row
// per klass in the same column order.
PrintClassClosure::PrintClassClosure(outputStream* st, bool verbose)
  :_st(st), _verbose(verbose) {
  ResourceMark rm;
  _st->print("%-18s  ", "KlassAddr");
  _st->print("%-4s ", "Size");
  _st->print("%-20s ", "State");
  _st->print("%-7s ", "Flags");
  _st->print("%-5s ", "ClassName");
  _st->cr();
}
2710
// Print one table row describing 'k'; columns match the header emitted by
// the constructor. With _verbose, the full klass dump follows the row.
void PrintClassClosure::do_klass(Klass* k) {
  ResourceMark rm;
  // klass pointer
  _st->print(PTR_FORMAT " ", p2i(k));
  // klass size
  _st->print("%4d ", k->size());
  // initialization state
  if (k->is_instance_klass()) {
    _st->print("%-20s ",InstanceKlass::cast(k)->init_state_name());
  } else {
    _st->print("%-20s ","");
  }
  // misc flags(Changes should synced with ClassesDCmd::ClassesDCmd help doc)
  // At most 6 flag characters plus the terminating NUL fit easily in buf[10].
  char buf[10];
  int i = 0;
  if (k->has_finalizer()) buf[i++] = 'F';
  if (k->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(k);
    if (ik->has_final_method()) buf[i++] = 'f';
    if (ik->is_rewritten()) buf[i++] = 'W';
    if (ik->is_contended()) buf[i++] = 'C';
    if (ik->has_been_redefined()) buf[i++] = 'R';
    if (ik->in_aot_cache()) buf[i++] = 'S';
  }
  buf[i++] = '\0';
  _st->print("%-7s ", buf);
  // klass name
  _st->print("%-5s ", k->external_name());
  // end
  _st->cr();
  if (_verbose) {
    k->print_on(_st);
  }
}
2745
/* jni_id_for for jfieldIds only */
// Return the JNIid for the static field at 'offset', creating it on first
// request. The lock serializes lookup+insert so each offset gets exactly
// one JNIid.
JNIid* InstanceKlass::jni_id_for(int offset) {
  MutexLocker ml(JfieldIdCreation_lock);
  JNIid* probe = jni_ids() == nullptr ? nullptr : jni_ids()->find(offset);
  if (probe == nullptr) {
    // Allocate new static field identifier; the new JNIid is prepended to
    // the existing list and becomes the new list head.
    probe = new JNIid(this, offset, jni_ids());
    set_jni_ids(probe);
  }
  return probe;
}
2757
// Return one u2 of the EnclosingMethod attribute data, selected by 'offset'
// (class index or method index), or 0 if no EnclosingMethod data is present.
u2 InstanceKlass::enclosing_method_data(int offset) const {
  const Array<jushort>* const inner_class_list = inner_classes();
  if (inner_class_list == nullptr) {
    return 0;
  }
  const int length = inner_class_list->length();
  // The enclosing-method entry, when present, is appended after the
  // fixed-size inner-class records; its presence makes the total length
  // not a multiple of inner_class_next_offset.
  if (length % inner_class_next_offset == 0) {
    return 0;
  }
  const int index = length - enclosing_method_attribute_size;
  assert(offset < enclosing_method_attribute_size, "invalid offset");
  return inner_class_list->at(index + offset);
}
2771
// Store the EnclosingMethod attribute's class/method constant-pool indices
// into the tail of the inner-classes array. Silently a no-op if the array
// was not laid out with the trailing enclosing-method slot.
void InstanceKlass::set_enclosing_method_indices(u2 class_index,
                                                 u2 method_index) {
  Array<jushort>* inner_class_list = inner_classes();
  assert (inner_class_list != nullptr, "_inner_classes list is not set up");
  int length = inner_class_list->length();
  // The trailing slot exists iff the length is off by exactly
  // enclosing_method_attribute_size from a whole number of records.
  if (length % inner_class_next_offset == enclosing_method_attribute_size) {
    int index = length - enclosing_method_attribute_size;
    inner_class_list->at_put(
      index + enclosing_method_class_index_offset, class_index);
    inner_class_list->at_put(
      index + enclosing_method_method_index_offset, method_index);
  }
}
2785
// Create a jmethodID for 'method' and publish it in the cache slot for
// 'idnum'. Caller is expected to hold JmethodIdCreation_lock (or be the
// sole owner of a not-yet-published cache).
jmethodID InstanceKlass::update_jmethod_id(jmethodID* jmeths, Method* method, int idnum) {
  if (method->is_old() && !method->is_obsolete()) {
    // If the method passed in is old (but not obsolete), use the current version.
    method = method_with_idnum((int)idnum);
    assert(method != nullptr, "old and but not obsolete, so should exist");
  }
  jmethodID new_id = Method::make_jmethod_id(class_loader_data(), method);
  // Slot 0 holds the cache size, so idnum i lives at jmeths[i + 1].
  // release_store makes the fully constructed id visible to lock-free readers.
  AtomicAccess::release_store(&jmeths[idnum + 1], new_id);
  return new_id;
}
2796
2797 // Allocate the jmethodID cache.
2798 static jmethodID* create_jmethod_id_cache(size_t size) {
2799 jmethodID* jmeths = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass);
2800 memset(jmeths, 0, (size + 1) * sizeof(jmethodID));
2801 // cache size is stored in element[0], other elements offset by one
2802 jmeths[0] = (jmethodID)size;
2803 return jmeths;
2804 }
2805
// When reading outside a lock, use this.
// load_acquire pairs with release_set_methods_jmethod_ids() below so a
// reader never observes a partially constructed cache.
jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
  return AtomicAccess::load_acquire(&_methods_jmethod_ids);
}

// Publish a fully initialized cache; pairs with the acquire-load above.
void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
  AtomicAccess::release_store(&_methods_jmethod_ids, jmeths);
}
2814
// Lookup or create a jmethodID.
jmethodID InstanceKlass::get_jmethod_id(Method* method) {
  int idnum = method->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from null to non-null which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache grows
  // in the redefinition safepoint.

  if (jmeths == nullptr) {
    // First check failed: create the cache under the lock.
    MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
    jmeths = _methods_jmethod_ids;
    // Still null?
    if (jmeths == nullptr) {
      size_t size = idnum_allocated_count();
      assert(size > (size_t)idnum, "should already have space");
      jmeths = create_jmethod_id_cache(size);
      // Populate the requested slot before publishing, so no other thread
      // needs to race us for this id.
      jmethodID new_id = update_jmethod_id(jmeths, method, idnum);

      // publish jmeths
      release_set_methods_jmethod_ids(jmeths);
      return new_id;
    }
  }

  // Cache exists: lock-free fast path, then double-checked slow path.
  jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
  if (id == nullptr) {
    MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
    id = jmeths[idnum + 1];
    // Still null?
    if (id == nullptr) {
      return update_jmethod_id(jmeths, method, idnum);
    }
  }
  return id;
}
2862
// Grow the jmethodID cache to match the current idnum count, e.g. after
// RedefineClasses added methods. Runs only at a safepoint, so no readers
// or writers can race the swap below.
void InstanceKlass::update_methods_jmethod_cache() {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  jmethodID* cache = _methods_jmethod_ids;
  if (cache != nullptr) {
    size_t size = idnum_allocated_count();
    size_t old_size = (size_t)cache[0];  // slot 0 stores the capacity
    if (old_size < size + 1) {
      // Allocate a larger one and copy entries to the new one.
      // They've already been updated to point to new methods where applicable (i.e., not obsolete).
      jmethodID* new_cache = create_jmethod_id_cache(size);

      for (int i = 1; i <= (int)old_size; i++) {
        new_cache[i] = cache[i];
      }
      _methods_jmethod_ids = new_cache;
      FREE_C_HEAP_ARRAY(jmethodID, cache);
    }
  }
}
2882
// Make a jmethodID for all methods in this class. This makes getting all method
// ids much, much faster with classes with more than 8
// methods, and has a *substantial* effect on performance with jvmti
// code that loads all jmethodIDs for all classes.
void InstanceKlass::make_methods_jmethod_ids() {
  // Hold the creation lock for the whole batch so ids are created at most once.
  MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
  jmethodID* jmeths = _methods_jmethod_ids;
  if (jmeths == nullptr) {
    jmeths = create_jmethod_id_cache(idnum_allocated_count());
    release_set_methods_jmethod_ids(jmeths);
  }

  int length = methods()->length();
  for (int index = 0; index < length; index++) {
    Method* m = methods()->at(index);
    int idnum = m->method_idnum();
    assert(!m->is_old(), "should not have old methods or I'm confused");
    jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
    if (!m->is_overpass() && // skip overpasses
        id == nullptr) {
      id = Method::make_jmethod_id(class_loader_data(), m);
      // release_store pairs with lock-free acquire-loads by readers.
      AtomicAccess::release_store(&jmeths[idnum + 1], id);
    }
  }
}
2908
2909 // Lookup a jmethodID, null if not found. Do no blocking, no allocations, no handles
2910 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2911 int idnum = method->method_idnum();
2912 jmethodID* jmeths = methods_jmethod_ids_acquire();
2913 return (jmeths != nullptr) ? jmeths[idnum + 1] : nullptr;
2914 }
2915
// Build a DependencyContext view over this klass' dependency fields.
inline DependencyContext InstanceKlass::dependencies() {
  DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
  return dep_context;
}

// Mark nmethods whose recorded dependencies are invalidated by 'changes'.
void InstanceKlass::mark_dependent_nmethods(DeoptimizationScope* deopt_scope, KlassDepChange& changes) {
  dependencies().mark_dependent_nmethods(deopt_scope, changes);
}

// Record that 'nm' depends on this klass; requires CodeCache_lock.
void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
  assert_lock_strong(CodeCache_lock);
  dependencies().add_dependent_nmethod(nm);
}

// Drop dependents whose nmethods are unloading.
void InstanceKlass::clean_dependency_context() {
  dependencies().clean_unloading_dependents();
}
2933
#ifndef PRODUCT
// Debug-build-only: print nmethods that recorded dependencies on this klass.
void InstanceKlass::print_dependent_nmethods(bool verbose) {
  dependencies().print_dependent_nmethods(verbose);
}

// Debug-build-only: true if 'nm' is registered in this klass' dependency context.
bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
  return dependencies().is_dependent_nmethod(nm);
}
#endif //PRODUCT
2943
// Clear weak references held by this klass to metadata that may be unloading:
// the interface implementor link and per-method profiling data.
void InstanceKlass::clean_weak_instanceklass_links() {
  clean_implementors_list();
  clean_method_data();
}
2948
// For an interface: null out the cached implementor if its loader is dead.
// Uses a CAS retry loop because concurrent class loading may be installing
// a new implementor at the same time.
void InstanceKlass::clean_implementors_list() {
  assert(is_loader_alive(), "this klass should be live");
  if (is_interface()) {
    assert (ClassUnloading, "only called for ClassUnloading");
    for (;;) {
      // Use load_acquire due to competing with inserts
      InstanceKlass* volatile* iklass = adr_implementor();
      assert(iklass != nullptr, "Klass must not be null");
      InstanceKlass* impl = AtomicAccess::load_acquire(iklass);
      if (impl != nullptr && !impl->is_loader_alive()) {
        // null this field, might be an unloaded instance klass or null
        if (AtomicAccess::cmpxchg(iklass, impl, (InstanceKlass*)nullptr) == impl) {
          // Successfully unlinking implementor.
          if (log_is_enabled(Trace, class, unload)) {
            ResourceMark rm;
            log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
          }
          return;
        }
        // CAS lost to a concurrent update: re-read and retry.
      } else {
        // Implementor absent or still alive: nothing to clean.
        return;
      }
    }
  }
}
2974
2975 void InstanceKlass::clean_method_data() {
2976 for (int m = 0; m < methods()->length(); m++) {
2977 MethodData* mdo = methods()->at(m)->method_data();
2978 if (mdo != nullptr) {
2979 mdo->clean_method_data(/*always_clean*/false);
2980 }
2981 }
2982 }
2983
// Visit every metaspace pointer embedded in this InstanceKlass so the closure
// can record/relocate them (used by CDS/AOT archiving). Fields pushed with
// MetaspaceClosure::_writable may still be mutated after archiving.
void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
  Klass::metaspace_pointers_do(it);

  if (log_is_enabled(Trace, aot)) {
    ResourceMark rm;
    log_trace(aot)("Iter(InstanceKlass): %p (%s)", this, external_name());
  }

  it->push(&_annotations);
  it->push((Klass**)&_array_klasses);
  if (!is_rewritten()) {
    // Not yet rewritten: the constant pool will still be mutated, so it must
    // stay writable in the archive.
    it->push(&_constants, MetaspaceClosure::_writable);
  } else {
    it->push(&_constants);
  }
  it->push(&_inner_classes);
#if INCLUDE_JVMTI
  it->push(&_previous_versions);
#endif
#if INCLUDE_CDS
  // For "old" classes with methods containing the jsr bytecode, the _methods array will
  // be rewritten during runtime (see Rewriter::rewrite_jsrs()) but they cannot be safely
  // checked here with ByteCodeStream. All methods that can't be verified are made writable.
  // The length check on the _methods is necessary because classes which don't have any
  // methods share the Universe::_the_empty_method_array which is in the RO region.
  if (_methods != nullptr && _methods->length() > 0 && !can_be_verified_at_dumptime()) {
    // To handle jsr bytecode, new Method* maybe stored into _methods
    it->push(&_methods, MetaspaceClosure::_writable);
  } else {
#endif
    it->push(&_methods);
#if INCLUDE_CDS
  }
#endif
  it->push(&_default_methods);
  it->push(&_local_interfaces);
  it->push(&_transitive_interfaces);
  it->push(&_method_ordering);
  if (!is_rewritten()) {
    // Kept writable until rewriting completes (see the _constants case above).
    it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
  } else {
    it->push(&_default_vtable_indices);
  }

  it->push(&_fieldinfo_stream);
  it->push(&_fieldinfo_search_table);
  // _fields_status might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
  it->push(&_fields_status, MetaspaceClosure::_writable);

  if (itable_length() > 0) {
    itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
    int method_table_offset_in_words = ioe->offset()/wordSize;
    int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);

    // The span between the itable start and the first method table holds the
    // per-interface offset entries; divide to count the interfaces.
    int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
                         / itableOffsetEntry::size();

    for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
      if (ioe->interface_klass() != nullptr) {
        it->push(ioe->interface_klass_addr());
        itableMethodEntry* ime = ioe->first_method_entry(this);
        int n = klassItable::method_count_for_interface(ioe->interface_klass());
        for (int index = 0; index < n; index ++) {
          it->push(ime[index].method_addr());
        }
      }
    }
  }

  it->push(&_nest_host);
  it->push(&_nest_members);
  it->push(&_permitted_subclasses);
  it->push(&_loadable_descriptors);
  it->push(&_acmp_maps_array, MetaspaceClosure::_writable);
  it->push(&_record_components);
  it->push(&_inline_layout_info_array, MetaspaceClosure::_writable);

  if (CDSConfig::is_dumping_full_module_graph() && !defined_by_other_loaders()) {
    it->push(&_package_entry);
  }
}
3065
3066 #if INCLUDE_CDS
// Strip all runtime-only state from this klass before it is written into the
// CDS/AOT archive; restore_unshareable_info() is the runtime counterpart.
void InstanceKlass::remove_unshareable_info() {

  if (is_linked()) {
    assert(can_be_verified_at_dumptime(), "must be");
    // Remember this so we can avoid walking the hierarchy at runtime.
    set_verified_at_dump_time();
  }

  _misc_flags.set_has_init_deps_processed(false);

  Klass::remove_unshareable_info();

  if (SystemDictionaryShared::has_class_failed_verification(this)) {
    // Classes are attempted to link during dumping and may fail,
    // but these classes are still in the dictionary and class list in CLD.
    // If the class has failed verification, there is nothing else to remove.
    return;
  }

  // Reset to the 'allocated' state to prevent any premature accessing to
  // a shared class at runtime while the class is still being loaded and
  // restored. A class' init_state is set to 'loaded' at runtime when it's
  // being added to class hierarchy (see InstanceKlass:::add_to_hierarchy()).
  _init_state = allocated;

  { // Otherwise this needs to take out the Compile_lock.
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    init_implementor();
  }

  // Call remove_unshareable_info() on other objects that belong to this class, except
  // for constants()->remove_unshareable_info(), which is called in a separate pass in
  // ArchiveBuilder::make_klasses_shareable(),

  for (int i = 0; i < methods()->length(); i++) {
    Method* m = methods()->at(i);
    m->remove_unshareable_info();
  }

  // do array classes also.
  if (array_klasses() != nullptr) {
    array_klasses()->remove_unshareable_info();
  }

  // These are not allocated from metaspace. They are safe to set to nullptr.
  _source_debug_extension = nullptr;
  _dep_context = nullptr;
  _osr_nmethods_head = nullptr;
#if INCLUDE_JVMTI
  _breakpoints = nullptr;
  _previous_versions = nullptr;
  _cached_class_file = nullptr;
  _jvmti_cached_class_field_map = nullptr;
#endif

  _init_thread = nullptr;
  _methods_jmethod_ids = nullptr;
  _jni_ids = nullptr;
  _oop_map_cache = nullptr;
  if (CDSConfig::is_dumping_method_handles() && HeapShared::is_lambda_proxy_klass(this)) {
    // keep _nest_host
  } else {
    // clear _nest_host to ensure re-load at runtime
    _nest_host = nullptr;
  }
  init_shared_package_entry();
  _dep_context_last_cleaned = 0;
  DEBUG_ONLY(_shared_class_load_count = 0);

  remove_unshareable_flags();

  DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
}
3140
// Reset flags/stats that must not appear in the archived version of this
// klass; called from remove_unshareable_info().
void InstanceKlass::remove_unshareable_flags() {
  // clear all the flags/stats that shouldn't be in the archived version
  assert(!is_scratch_class(), "must be");
  assert(!has_been_redefined(), "must be");
#if INCLUDE_JVMTI
  set_is_being_redefined(false);
#endif
  set_has_resolved_methods(false);
}
3150
// Drop the java.lang.Class mirror reference for archiving; also strips the
// mirrors of any array classes of this klass.
void InstanceKlass::remove_java_mirror() {
  Klass::remove_java_mirror();

  // do array classes also.
  if (array_klasses() != nullptr) {
    array_klasses()->remove_java_mirror();
  }
}
3159
3160 void InstanceKlass::init_shared_package_entry() {
3161 assert(CDSConfig::is_dumping_archive(), "must be");
3162 if (!CDSConfig::is_dumping_full_module_graph() || defined_by_other_loaders()) {
3163 _package_entry = nullptr;
3164 }
3165 }
3166
3167 void InstanceKlass::compute_has_loops_flag_for_methods() {
3168 Array<Method*>* methods = this->methods();
3169 for (int index = 0; index < methods->length(); ++index) {
3170 Method* m = methods->at(index);
3171 if (!m->is_overpass()) { // work around JDK-8305771
3172 m->compute_has_loops_flag();
3173 }
3174 }
3175 }
3176
// Runtime counterpart of remove_unshareable_info(): re-create the state that
// was stripped before archiving (package, mirror, method state, resolved
// references, array classes). May throw via the CHECK macros.
void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
                                             PackageEntry* pkg_entry, TRAPS) {
  // InstanceKlass::add_to_hierarchy() sets the init_state to loaded
  // before the InstanceKlass is added to the SystemDictionary. Make
  // sure the current state is <loaded.
  assert(!is_loaded(), "invalid init state");
  assert(!shared_loading_failed(), "Must not try to load failed class again");
  set_package(loader_data, pkg_entry, CHECK);
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);

  if (is_inline_klass()) {
    InlineKlass::cast(this)->initialize_calling_convention(CHECK);
  }

  Array<Method*>* methods = this->methods();
  int num_methods = methods->length();
  for (int index = 0; index < num_methods; ++index) {
    methods->at(index)->restore_unshareable_info(CHECK);
  }
#if INCLUDE_JVMTI
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries. RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    // First fix any default methods that point to a super class that may
    // have been redefined.
    bool trace_name_printed = false;
    adjust_default_methods(&trace_name_printed);
    if (verified_at_dump_time()) {
      // Initialize vtable and itable for classes which can be verified at dump time.
      // Unlinked classes such as old classes with major version < 50 cannot be verified
      // at dump time.
      vtable().initialize_vtable();
      itable().initialize_itable();
    }
  }
#endif // INCLUDE_JVMTI

  // restore constant pool resolved references
  constants()->restore_unshareable_info(CHECK);

  // Restore acmp_maps java array from the version stored in metadata.
  // if it cannot be found in the archive
  if (Arguments::is_valhalla_enabled() && has_acmp_maps_offset() && java_mirror()->obj_field(_acmp_maps_offset) == nullptr) {
    int acmp_maps_size = _acmp_maps_array->length();
    typeArrayOop map = oopFactory::new_intArray(acmp_maps_size, CHECK);
    // Handleize before the copy loop: int_at_put cannot safepoint here, but
    // the handle keeps 'map' safe across any future GC-sensitive changes.
    typeArrayHandle map_h(THREAD, map);
    for (int i = 0; i < acmp_maps_size; i++) {
      map_h->int_at_put(i, _acmp_maps_array->at(i));
    }
    java_mirror()->obj_field_put(_acmp_maps_offset, map_h());
  }

  if (array_klasses() != nullptr) {
    // To get a consistent list of classes we need MultiArray_lock to ensure
    // array classes aren't observed while they are being restored.
    RecursiveLocker rl(MultiArray_lock, THREAD);
    assert(this == ObjArrayKlass::cast(array_klasses())->bottom_klass(), "sanity");
    // Array classes have null protection domain.
    // --> see ArrayKlass::complete_create_array_klass()
    if (class_loader_data() == nullptr) {
      ResourceMark rm(THREAD);
      log_debug(cds)("  loader_data %s ", loader_data == nullptr ? "nullptr" : "non null");
      log_debug(cds)("  this %s array_klasses %s ", this->name()->as_C_string(), array_klasses()->name()->as_C_string());
    }
    assert(!array_klasses()->is_refined_objArray_klass(), "must be non-refined objarrayklass");
    array_klasses()->restore_unshareable_info(class_loader_data(), Handle(), CHECK);
  }

  // Initialize @ValueBased class annotation if not already set in the archived klass.
  if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation() && !is_value_based()) {
    set_is_value_based();
  }

  DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
}
3255
3256 bool InstanceKlass::can_be_verified_at_dumptime() const {
3257 if (CDSConfig::is_dumping_dynamic_archive() && AOTMetaspace::in_aot_cache(this)) {
3258 // This is a class that was dumped into the base archive, so we know
3259 // it was verified at dump time.
3260 return true;
3261 }
3262
3263 if (CDSConfig::is_preserving_verification_constraints()) {
3264 return true;
3265 }
3266
3267 if (CDSConfig::is_old_class_for_verifier(this)) {
3268 // The old verifier does not save verification constraints, so at run time
3269 // SystemDictionaryShared::check_verification_constraints() will not work for this class.
3270 return false;
3271 }
3272 if (super() != nullptr && !super()->can_be_verified_at_dumptime()) {
3273 return false;
3274 }
3275 Array<InstanceKlass*>* interfaces = local_interfaces();
3276 int len = interfaces->length();
3277 for (int i = 0; i < len; i++) {
3278 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
3279 return false;
3280 }
3281 }
3282 return true;
3283 }
3284
3285 #endif // INCLUDE_CDS
3286
3287 #if INCLUDE_JVMTI
// methods_do() helper: strip every breakpoint set on 'm'.
static void clear_all_breakpoints(Method* m) {
  m->clear_all_breakpoints();
}
3291 #endif
3292
// Notify all interested subsystems (debugger, management, CDS, events, JFR)
// that 'ik' is being unloaded, and release its dependency records.
void InstanceKlass::unload_class(InstanceKlass* ik) {

  if (ik->is_scratch_class()) {
    // Scratch classes (RedefineClasses temporaries) were never published,
    // so there is nothing to notify about.
    assert(ik->dependencies().is_empty(), "dependencies should be empty for scratch classes");
    return;
  }
  assert(ik->is_loaded(), "class should be loaded " PTR_FORMAT, p2i(ik));

  // Release dependencies.
  ik->dependencies().remove_all_dependents();

  // notify the debugger
  if (JvmtiExport::should_post_class_unload()) {
    JvmtiExport::post_class_unload(ik);
  }

  // notify ClassLoadingService of class unload
  ClassLoadingService::notify_class_unloaded(ik);

  SystemDictionaryShared::handle_class_unloading(ik);

  if (log_is_enabled(Info, class, unload)) {
    ResourceMark rm;
    log_info(class, unload)("unloading class %s " PTR_FORMAT, ik->external_name(), p2i(ik));
  }

  Events::log_class_unloading(Thread::current(), ik);

#if INCLUDE_JFR
  assert(ik != nullptr, "invariant");
  EventClassUnload event;
  event.set_unloadedClass(ik);
  event.set_definingClassLoader(ik->class_loader_data());
  event.commit();
#endif
}
3329
// methods_do() helper: free the C-heap data owned by 'm'.
static void method_release_C_heap_structures(Method* m) {
  m->release_C_heap_structures();
}
3333
3334 // Called also by InstanceKlass::deallocate_contents, with false for release_sub_metadata.
3335 void InstanceKlass::release_C_heap_structures(bool release_sub_metadata) {
3336 // Clean up C heap
3337 Klass::release_C_heap_structures();
3338
3339 // Deallocate and call destructors for MDO mutexes
3340 if (release_sub_metadata) {
3341 methods_do(method_release_C_heap_structures);
3342 }
3343
3344 // Deallocate oop map cache
3345 if (_oop_map_cache != nullptr) {
3346 delete _oop_map_cache;
3347 _oop_map_cache = nullptr;
3348 }
3349
3350 // Deallocate JNI identifiers for jfieldIDs
3351 JNIid::deallocate(jni_ids());
3352 set_jni_ids(nullptr);
3353
3354 jmethodID* jmeths = _methods_jmethod_ids;
3355 if (jmeths != nullptr) {
3356 release_set_methods_jmethod_ids(nullptr);
3357 FreeHeap(jmeths);
3358 }
3359
3360 assert(_dep_context == nullptr,
3361 "dependencies should already be cleaned");
3362
3363 #if INCLUDE_JVMTI
3364 // Deallocate breakpoint records
3365 if (breakpoints() != nullptr) {
3366 methods_do(clear_all_breakpoints);
3367 assert(breakpoints() == nullptr, "should have cleared breakpoints");
3368 }
3369
3370 // deallocate the cached class file
3371 if (_cached_class_file != nullptr) {
3372 os::free(_cached_class_file);
3373 _cached_class_file = nullptr;
3374 }
3375 #endif
3376
3377 FREE_C_HEAP_ARRAY(char, _source_debug_extension);
3378
3379 if (release_sub_metadata) {
3380 constants()->release_C_heap_structures();
3381 }
3382 }
3383
// The constant pool is on stack if any of the methods are executing or
// referenced by handles. Delegates to the constant pool's on_stack flag.
bool InstanceKlass::on_stack() const {
  return _constants->on_stack();
}
3389
// SourceFile attribute accessors -- the data is stored in the constant pool.
Symbol* InstanceKlass::source_file_name() const { return _constants->source_file_name(); }
u2 InstanceKlass::source_file_name_index() const { return _constants->source_file_name_index(); }
void InstanceKlass::set_source_file_name_index(u2 sourcefile_index) { _constants->set_source_file_name_index(sourcefile_index); }

// minor and major version numbers of class file; also stored in the
// constant pool rather than in the klass itself.
u2 InstanceKlass::minor_version() const { return _constants->minor_version(); }
void InstanceKlass::set_minor_version(u2 minor_version) { _constants->set_minor_version(minor_version); }
u2 InstanceKlass::major_version() const { return _constants->major_version(); }
void InstanceKlass::set_major_version(u2 major_version) { _constants->set_major_version(major_version); }
3399
3400 bool InstanceKlass::supports_inline_types() const {
3401 return major_version() >= Verifier::VALUE_TYPES_MAJOR_VERSION && minor_version() == Verifier::JAVA_PREVIEW_MINOR_VERSION;
3402 }
3403
3404 const InstanceKlass* InstanceKlass::get_klass_version(int version) const {
3405 for (const InstanceKlass* ik = this; ik != nullptr; ik = ik->previous_versions()) {
3406 if (ik->constants()->version() == version) {
3407 return ik;
3408 }
3409 }
3410 return nullptr;
3411 }
3412
3413 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
3414 if (array == nullptr) {
3415 _source_debug_extension = nullptr;
3416 } else {
3417 // Adding one to the attribute length in order to store a null terminator
3418 // character could cause an overflow because the attribute length is
3419 // already coded with an u4 in the classfile, but in practice, it's
3420 // unlikely to happen.
3421 assert((length+1) > length, "Overflow checking");
3422 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
3423 for (int i = 0; i < length; i++) {
3424 sde[i] = array[i];
3425 }
3426 sde[length] = '\0';
3427 _source_debug_extension = sde;
3428 }
3429 }
3430
// Signature attribute (generic signature) accessors; stored in the constant pool.
Symbol* InstanceKlass::generic_signature() const { return _constants->generic_signature(); }
u2 InstanceKlass::generic_signature_index() const { return _constants->generic_signature_index(); }
void InstanceKlass::set_generic_signature_index(u2 sig_index) { _constants->set_generic_signature_index(sig_index); }
3434
// Returns this class's JVM field-descriptor form using the standard 'L'
// carrier, e.g. "Ljava/lang/String;".
const char* InstanceKlass::signature_name() const {
  return signature_name_of_carrier(JVM_SIGNATURE_CLASS);
}
3438
// Builds a resource-allocated descriptor string "<c><internal-name>;" for
// this class with the given carrier character (e.g. 'L').
// For hidden classes the last '+' in the name is replaced with '.'.
const char* InstanceKlass::signature_name_of_carrier(char c) const {
  // Get the internal name as a c string
  const char* src = (const char*) (name()->as_C_string());
  const int src_length = (int)strlen(src);

  // +3 for the carrier char, the closing ';' and the trailing '\0'.
  char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);

  // Add L or Q as type indicator
  int dest_index = 0;
  dest[dest_index++] = c;

  // Add the actual class name
  for (int src_index = 0; src_index < src_length; ) {
    dest[dest_index++] = src[src_index++];
  }

  if (is_hidden()) { // Replace the last '+' with a '.'.
    // The name occupies dest[1 .. src_length]; dest[0] holds the carrier
    // char, so the scan deliberately stops before index 0.
    for (int index = (int)src_length; index > 0; index--) {
      if (dest[index] == '+') {
        dest[index] = JVM_SIGNATURE_DOT;
        break;
      }
    }
  }

  // Add the semicolon and the null
  dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
  dest[dest_index] = '\0';
  return dest;
}
3469
// Returns the ModuleEntry this class belongs to: the package's module for
// classes in a named package, the loader's unnamed module otherwise, with a
// special case for non-strong hidden classes (see below).
ModuleEntry* InstanceKlass::module() const {
  if (is_hidden() &&
      in_unnamed_package() &&
      class_loader_data()->has_class_mirror_holder()) {
    // For a non-strong hidden class defined to an unnamed package,
    // its (class held) CLD will not have an unnamed module created for it.
    // Two choices to find the correct ModuleEntry:
    // 1. If hidden class is within a nest, use nest host's module
    // 2. Find the unnamed module off from the class loader
    // For now option #2 is used since a nest host is not set until
    // after the instance class is created in jvm_lookup_define_class().
    if (class_loader_data()->is_boot_class_loader_data()) {
      return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
    } else {
      oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
      assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
      return java_lang_Module::module_entry(module);
    }
  }

  // Class is in a named package
  if (!in_unnamed_package()) {
    return _package_entry->module();
  }

  // Class is in an unnamed package, return its loader's unnamed module
  return class_loader_data()->unnamed_module();
}
3498
// True if this class's module is java.base (compared by module name symbol).
bool InstanceKlass::in_javabase_module() const {
  return module()->name() == vmSymbols::java_base();
}
3502
// Computes and caches this class's PackageEntry in _package_entry.
// If pkg_entry is supplied it is used directly; otherwise the package name
// is derived from the class name and looked up (or created) in
// loader_data's package table. Classes with no package name end up with a
// null _package_entry (unnamed package). Throws SecurityException when a
// non-boot/platform loader tries to define a "java/" package.
void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {

  // ensure java/ packages only loaded by boot or platform builtin loaders
  // not needed for shared class since CDS does not archive prohibited classes.
  if (!in_aot_cache()) {
    check_prohibited_package(name(), loader_data, CHECK);
  }

  // For AOT-cached classes, reuse the archived package entry if the full
  // module graph is in use and the entry matches; otherwise recompute.
  if (in_aot_cache() && _package_entry != nullptr) {
    if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) {
      // we can use the saved package
      assert(AOTMetaspace::in_aot_cache(_package_entry), "must be");
      return;
    } else {
      _package_entry = nullptr;
    }
  }

  // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
  // it returns, so we need to decrement it when the current function exits.
  TempNewSymbol from_class_name =
      (pkg_entry != nullptr) ? nullptr : ClassLoader::package_from_class_name(name());

  Symbol* pkg_name;
  if (pkg_entry != nullptr) {
    pkg_name = pkg_entry->name();
  } else {
    pkg_name = from_class_name;
  }

  if (pkg_name != nullptr && loader_data != nullptr) {

    // Find in class loader's package entry table.
    _package_entry = pkg_entry != nullptr ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);

    // If the package name is not found in the loader's package
    // entry table, it is an indication that the package has not
    // been defined. Consider it defined within the unnamed module.
    if (_package_entry == nullptr) {

      if (!ModuleEntryTable::javabase_defined()) {
        // Before java.base is defined during bootstrapping, define all packages in
        // the java.base module. If a non-java.base package is erroneously placed
        // in the java.base module it will be caught later when java.base
        // is defined by ModuleEntryTable::verify_javabase_packages check.
        assert(ModuleEntryTable::javabase_moduleEntry() != nullptr, JAVA_BASE_NAME " module is null");
        _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, ModuleEntryTable::javabase_moduleEntry());
      } else {
        assert(loader_data->unnamed_module() != nullptr, "unnamed module is null");
        _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, loader_data->unnamed_module());
      }

      // A package should have been successfully created
      DEBUG_ONLY(ResourceMark rm(THREAD));
      assert(_package_entry != nullptr, "Package entry for class %s not found, loader %s",
             name()->as_C_string(), loader_data->loader_name_and_id());
    }

    if (log_is_enabled(Debug, module)) {
      ResourceMark rm(THREAD);
      ModuleEntry* m = _package_entry->module();
      log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
                        external_name(),
                        pkg_name->as_C_string(),
                        loader_data->loader_name_and_id(),
                        (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
    }
  } else {
    ResourceMark rm(THREAD);
    log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
                      external_name(),
                      (loader_data != nullptr) ? loader_data->loader_name_and_id() : "null",
                      UNNAMED_MODULE);
  }
}
3578
3579 // Function set_classpath_index ensures that for a non-null _package_entry
3580 // of the InstanceKlass, the entry is in the boot loader's package entry table.
3581 // It then sets the classpath_index in the package entry record.
3582 //
3583 // The classpath_index field is used to find the entry on the boot loader class
3584 // path for packages with classes loaded by the boot loader from -Xbootclasspath/a
3585 // in an unnamed module. It is also used to indicate (for all packages whose
3586 // classes are loaded by the boot loader) that at least one of the package's
3587 // classes has been loaded.
// See the block comment above: records path_index on this class's package
// entry (boot loader packages only; verified by the debug-only lookup).
void InstanceKlass::set_classpath_index(s2 path_index) {
  if (_package_entry != nullptr) {
    // Debug-only: both the table pointer and the asserts compile away in
    // product builds.
    DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
    assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
    assert(path_index != -1, "Unexpected classpath_index");
    _package_entry->set_classpath_index(path_index);
  }
}
3596
3597 // different versions of is_same_class_package
3598
3599 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
3600 oop classloader1 = this->class_loader();
3601 PackageEntry* classpkg1 = this->package();
3602 if (class2->is_objArray_klass()) {
3603 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
3604 }
3605
3606 oop classloader2;
3607 PackageEntry* classpkg2;
3608 if (class2->is_instance_klass()) {
3609 classloader2 = class2->class_loader();
3610 classpkg2 = class2->package();
3611 } else {
3612 assert(class2->is_typeArray_klass(), "should be type array");
3613 classloader2 = nullptr;
3614 classpkg2 = nullptr;
3615 }
3616
3617 // Same package is determined by comparing class loader
3618 // and package entries. Both must be the same. This rule
3619 // applies even to classes that are defined in the unnamed
3620 // package, they still must have the same class loader.
3621 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
3622 return true;
3623 }
3624
3625 return false;
3626 }
3627
// return true if this class and other_class are in the same package. Classloader
// and classname information is enough to determine a class's package
bool InstanceKlass::is_same_class_package(oop other_class_loader,
                                          const Symbol* other_class_name) const {
  // Different loaders can never be the same package.
  if (class_loader() != other_class_loader) {
    return false;
  }
  // Identical class name (same loader) trivially means same package.
  if (name()->fast_compare(other_class_name) == 0) {
    return true;
  }

  {
    ResourceMark rm;

    bool bad_class_name = false;
    TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
    if (bad_class_name) {
      return false;
    }
    // Check that package_from_class_name() returns null, not "", if there is no package.
    assert(other_pkg == nullptr || other_pkg->utf8_length() > 0, "package name is empty string");

    const Symbol* const this_package_name =
      this->package() != nullptr ? this->package()->name() : nullptr;

    if (this_package_name == nullptr || other_pkg == nullptr) {
      // One of the two doesn't have a package. Only return true if the other
      // one also doesn't have a package.
      return this_package_name == other_pkg;
    }

    // Check if package is identical
    return this_package_name->fast_compare(other_pkg) == 0;
  }
}
3663
3664 static bool is_prohibited_package_slow(Symbol* class_name) {
3665 // Caller has ResourceMark
3666 int length;
3667 jchar* unicode = class_name->as_unicode(length);
3668 return (length >= 5 &&
3669 unicode[0] == 'j' &&
3670 unicode[1] == 'a' &&
3671 unicode[2] == 'v' &&
3672 unicode[3] == 'a' &&
3673 unicode[4] == '/');
3674 }
3675
// Only boot and platform class loaders can define classes in "java/" packages.
// Throws SecurityException (with a message naming the loader and package)
// if 'class_name' starts with "java/" and 'loader_data' is neither the
// boot nor the platform loader. Otherwise returns normally.
void InstanceKlass::check_prohibited_package(Symbol* class_name,
                                             ClassLoaderData* loader_data,
                                             TRAPS) {
  if (!loader_data->is_boot_class_loader_data() &&
      !loader_data->is_platform_class_loader_data() &&
      class_name != nullptr && class_name->utf8_length() >= 5) {
    ResourceMark rm(THREAD);
    bool prohibited;
    // Fast path only works when the first five bytes are ASCII; any
    // high-bit byte forces the unicode-decoding slow path.
    const u1* base = class_name->base();
    if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
      prohibited = is_prohibited_package_slow(class_name);
    } else {
      char* name = class_name->as_C_string();
      prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
    }
    if (prohibited) {
      // Build the exception message with the dotted package name.
      TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
      assert(pkg_name != nullptr, "Error in parsing package name starting with 'java/'");
      char* name = pkg_name->as_C_string();
      const char* class_loader_name = loader_data->loader_name_and_id();
      StringUtils::replace_no_expand(name, "/", ".");
      const char* msg_text1 = "Class loader (instance of): ";
      const char* msg_text2 = " tried to load prohibited package name: ";
      size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
      char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
      jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
      THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
    }
  }
  return;
}
3708
// Scans this class's InnerClasses attribute for the entry describing this
// class itself. On a match, stores the outer_class_info and inner_name
// constant-pool indices into *ooff / *noff and returns true. Returns false
// if no entry matches or if resolving a candidate klass raises an
// exception (CHECK_false).
bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
  constantPoolHandle i_cp(THREAD, constants());
  for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    if (ioff != 0) {
      // Check to see if the name matches the class we're looking for
      // before attempting to find the class.
      if (i_cp->klass_name_at_matches(this, ioff)) {
        Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
        if (this == inner_klass) {
          *ooff = iter.outer_class_info_index();
          *noff = iter.inner_name_index();
          return true;
        }
      }
    }
  }
  return false;
}
3728
// Verifies that 'type' is usable as a @NullRestricted field type inside
// class 'container_klass_name': it must be a concrete value class.
// Throws IncompatibleClassChangeError if 'type' is an identity class or
// an abstract class; returns normally otherwise.
void InstanceKlass::check_can_be_annotated_with_NullRestricted(InstanceKlass* type, Symbol* container_klass_name, TRAPS) {
  assert(type->is_instance_klass(), "Sanity check");
  if (type->is_identity_class()) {
    ResourceMark rm(THREAD);
    THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
              err_msg("Class %s expects class %s to be a value class, but it is an identity class",
              container_klass_name->as_C_string(),
              type->external_name()));
  }

  if (type->is_abstract()) {
    ResourceMark rm(THREAD);
    THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
              err_msg("Class %s expects class %s to be concrete value type, but it is an abstract class",
              container_klass_name->as_C_string(),
              type->external_name()));
  }
}
3747
// Determines the enclosing class of this (possibly inner) class from the
// InnerClasses / EnclosingMethod attributes. Sets *inner_is_member to true
// when this class is a declared member of the outer class (as opposed to a
// local/anonymous class found via EnclosingMethod). Returns null when no
// enclosing class exists; throws IncompatibleClassChangeError when the
// attributes are inconsistent or the mutual-declaration check fails.
InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
  InstanceKlass* outer_klass = nullptr;
  *inner_is_member = false;
  int ooff = 0, noff = 0;
  bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
  if (has_inner_classes_attr) {
    constantPoolHandle i_cp(THREAD, constants());
    if (ooff != 0) {
      Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
      if (!ok->is_instance_klass()) {
        // If the outer class is not an instance klass then it cannot have
        // declared any inner classes.
        ResourceMark rm(THREAD);
        // Names are all known to be < 64k so we know this formatted message is not excessively large.
        Exceptions::fthrow(
          THREAD_AND_LOCATION,
          vmSymbols::java_lang_IncompatibleClassChangeError(),
          "%s and %s disagree on InnerClasses attribute",
          ok->external_name(),
          external_name());
        return nullptr;
      }
      outer_klass = InstanceKlass::cast(ok);
      *inner_is_member = true;
    }
    if (nullptr == outer_klass) {
      // It may be a local class; try for that.
      int encl_method_class_idx = enclosing_method_class_index();
      if (encl_method_class_idx != 0) {
        Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
        outer_klass = InstanceKlass::cast(ok);
        *inner_is_member = false;
      }
    }
  }

  // If no inner class attribute found for this class.
  if (nullptr == outer_klass) return nullptr;

  // Throws an exception if outer klass has not declared k as an inner klass
  // We need evidence that each klass knows about the other, or else
  // the system could allow a spoof of an inner class to gain access rights.
  Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
  return outer_klass;
}
3793
3794 u2 InstanceKlass::compute_modifier_flags() const {
3795 u2 access = access_flags().as_unsigned_short();
3796
3797 // But check if it happens to be member class.
3798 InnerClassesIterator iter(this);
3799 for (; !iter.done(); iter.next()) {
3800 int ioff = iter.inner_class_info_index();
3801 // Inner class attribute can be zero, skip it.
3802 // Strange but true: JVM spec. allows null inner class refs.
3803 if (ioff == 0) continue;
3804
3805 // only look at classes that are already loaded
3806 // since we are looking for the flags for our self.
3807 Symbol* inner_name = constants()->klass_name_at(ioff);
3808 if (name() == inner_name) {
3809 // This is really a member class.
3810 access = iter.inner_access_flags();
3811 break;
3812 }
3813 }
3814 if (!Arguments::is_valhalla_enabled()) {
3815 // Remember to strip ACC_SUPER bit without Valhalla
3816 access &= (~JVM_ACC_SUPER);
3817 }
3818 return access;
3819 }
3820
3821 jint InstanceKlass::jvmti_class_status() const {
3822 jint result = 0;
3823
3824 if (is_linked()) {
3825 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3826 }
3827
3828 if (is_initialized()) {
3829 assert(is_linked(), "Class status is not consistent");
3830 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3831 }
3832 if (is_in_error_state()) {
3833 result |= JVMTI_CLASS_STATUS_ERROR;
3834 }
3835 return result;
3836 }
3837
// Resolves the itable entry for interface 'holder' at 'index' on this
// receiver class. Returns the target method on success; throws
// AbstractMethodError if the receiver implements the interface but the
// slot is empty, and IncompatibleClassChangeError (with a detailed
// module/loader message) if the receiver does not implement 'holder'.
Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
  bool implements_interface; // initialized by method_at_itable_or_null
  Method* m = method_at_itable_or_null(holder, index,
                                       implements_interface); // out parameter
  if (m != nullptr) {
    assert(implements_interface, "sanity");
    return m;
  } else if (implements_interface) {
    // Throw AbstractMethodError since corresponding itable slot is empty.
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  } else {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    ResourceMark rm(THREAD);
    stringStream ss;
    bool same_module = (module() == holder->module());
    ss.print("Receiver class %s does not implement "
             "the interface %s defining the method to be called "
             "(%s%s%s)",
             external_name(), holder->external_name(),
             (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
             (same_module) ? "" : "; ",
             (same_module) ? "" : holder->class_in_module_of_loader());
    THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
  }
}
3864
3865 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3866 klassItable itable(this);
3867 for (int i = 0; i < itable.size_offset_table(); i++) {
3868 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3869 if (offset_entry->interface_klass() == holder) {
3870 implements_interface = true;
3871 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3872 Method* m = ime[index].method();
3873 return m;
3874 }
3875 }
3876 implements_interface = false;
3877 return nullptr; // offset entry not found
3878 }
3879
// Finds the vtable index through which 'intf_method' (an interface method
// this class implements) is dispatched on this class: first via the
// default-methods table, then via the miranda lookup in the vtable.
// Returns Method::invalid_vtable_index if neither lookup succeeds.
int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
  assert(is_linked(), "required");
  assert(intf_method->method_holder()->is_interface(), "not an interface method");
  assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");

  int vtable_index = Method::invalid_vtable_index;
  Symbol* name = intf_method->name();
  Symbol* signature = intf_method->signature();

  // First check in default method array
  if (!intf_method->is_abstract() && default_methods() != nullptr) {
    int index = find_method_index(default_methods(),
                                  name, signature,
                                  Klass::OverpassLookupMode::find,
                                  Klass::StaticLookupMode::find,
                                  Klass::PrivateLookupMode::find);
    if (index >= 0) {
      // default_vtable_indices() parallels default_methods().
      vtable_index = default_vtable_indices()->at(index);
    }
  }
  if (vtable_index == Method::invalid_vtable_index) {
    // get vtable_index for miranda methods
    klassVtable vt = vtable();
    vtable_index = vt.index_of_miranda(name, signature);
  }
  return vtable_index;
}
3907
3908 #if INCLUDE_JVMTI
3909 // update default_methods for redefineclasses for methods that are
3910 // not yet in the vtable due to concurrent subclass define and superinterface
3911 // redefinition
3912 // Note: those in the vtable, should have been updated via adjust_method_entries
// Replaces obsolete/EMCP Method* entries in the default_methods array with
// their current versions after RedefineClasses (see block comment above).
void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
  // search the default_methods for uses of either obsolete or EMCP methods
  if (default_methods() != nullptr) {
    for (int index = 0; index < default_methods()->length(); index ++) {
      Method* old_method = default_methods()->at(index);
      if (old_method == nullptr || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      assert(!old_method->is_deleted(), "default methods may not be deleted");
      Method* new_method = old_method->get_new_method();
      default_methods()->at_put(index, new_method);

      if (log_is_enabled(Info, redefine, class, update)) {
        ResourceMark rm;
        // Print the klass header line only once per adjustment pass.
        if (!(*trace_name_printed)) {
          log_info(redefine, class, update)
            ("adjust: klassname=%s default methods from name=%s",
             external_name(), old_method->method_holder()->external_name());
          *trace_name_printed = true;
        }
        log_debug(redefine, class, update, vtables)
          ("default method update: %s(%s) ",
           new_method->name()->as_C_string(), new_method->signature()->as_C_string());
      }
    }
  }
}
3940 #endif // INCLUDE_JVMTI
3941
3942 // On-stack replacement stuff
// Adds OSR nmethod n to the head of this klass's OSR list (caller must
// hold NMethodState_lock), raises the method's highest OSR level if
// needed, and invalidates in-use OSR nmethods for the same bci that were
// compiled at a lower level.
void InstanceKlass::add_osr_nmethod(nmethod* n) {
  assert_lock_strong(NMethodState_lock);
#ifndef PRODUCT
  nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
  assert(prev == nullptr || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
         "redundant OSR recompilation detected. memory leak in CodeCache!");
#endif
  // only one compilation can be active
  assert(n->is_osr_method(), "wrong kind of nmethod");
  n->set_osr_link(osr_nmethods_head());
  set_osr_nmethods_head(n);
  // Raise the highest osr level if necessary
  n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));

  // Get rid of the osr methods for the same bci that have lower levels.
  for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
    nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
    if (inv != nullptr && inv->is_in_use()) {
      inv->make_not_entrant(nmethod::InvalidationReason::OSR_INVALIDATION_OF_LOWER_LEVEL);
    }
  }
}
3965
3966 // Remove osr nmethod from the list. Return true if found and removed.
// Unlinks OSR nmethod n from this klass's singly-linked OSR list and
// recomputes the owning method's highest OSR comp level from the
// remaining entries. Returns true if n was found and removed.
bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = nullptr;
  nmethod* cur = osr_nmethods_head();
  int max_level = CompLevel_none; // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  bool found = false;
  // First pass: walk up to n, tracking the max level seen before it.
  while(cur != nullptr && cur != n) {
    if (m == cur->method()) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = nullptr;
  if (cur == n) {
    found = true;
    next = cur->osr_link();
    if (last == nullptr) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  n->set_osr_link(nullptr);
  // Second pass: continue past n's old position to finish the max-level scan.
  cur = next;
  while (cur != nullptr) {
    // Find max level after n
    if (m == cur->method()) {
      max_level = MAX2(max_level, cur->comp_level());
    }
    cur = cur->osr_link();
  }
  m->set_highest_osr_comp_level(max_level);
  return found;
}
4008
4009 int InstanceKlass::mark_osr_nmethods(DeoptimizationScope* deopt_scope, const Method* m) {
4010 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
4011 nmethod* osr = osr_nmethods_head();
4012 int found = 0;
4013 while (osr != nullptr) {
4014 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
4015 if (osr->method() == m) {
4016 deopt_scope->mark(osr);
4017 found++;
4018 }
4019 osr = osr->osr_link();
4020 }
4021 return found;
4022 }
4023
// Finds an OSR nmethod for (m, bci) in this klass's OSR list. With
// match_level, only an exact comp_level match is returned. Otherwise the
// highest-level candidate is chosen, returned early if it already reaches
// the policy's highest compile level, and accepted only if its level is at
// least comp_level. bci == InvocationEntryBci matches any entry bci.
// Returns null when nothing qualifies.
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  nmethod* best = nullptr;
  while (osr != nullptr) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          return osr;
        }
      } else {
        if (best == nullptr || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
            // Found the best possible - return it.
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }

  assert(match_level == false || best == nullptr, "shouldn't pick up anything if match_level is set");
  if (best != nullptr && best->comp_level() >= comp_level) {
    return best;
  }
  return nullptr;
}
4062
4063 // -----------------------------------------------------------------------------------------------------
4064 // Printing
4065
4066 #define BULLET " - "
4067
// Printable names for the class initialization states, indexed by
// init_state() (see init_state_name() below); order must match the enum.
static const char* state_names[] = {
  "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
};
4071
// Dumps 'len' table words starting at 'start', one line per slot. A word
// that looks like valid Metadata is also printed symbolically. When 'self'
// is non-null, small positive words are treated as byte offsets from
// 'self' and the slot they point at is cross-referenced in both directions.
static void print_vtable(address self, intptr_t* start, int len, outputStream* st) {
  ResourceMark rm;
  // forward_refs[j] == i records that slot i held an offset resolving to
  // slot j; reported once the loop reaches slot j.
  int* forward_refs = NEW_RESOURCE_ARRAY(int, len);
  for (int i = 0; i < len; i++) forward_refs[i] = 0;
  for (int i = 0; i < len; i++) {
    intptr_t e = start[i];
    st->print("%d : " INTPTR_FORMAT, i, e);
    if (forward_refs[i] != 0) {
      int from = forward_refs[i];
      int off = (int) start[from];
      st->print(" (offset %d <= [%d])", off, from);
    }
    if (MetaspaceObj::is_valid((Metadata*)e)) {
      st->print(" ");
      ((Metadata*)e)->print_value_on(st);
    } else if (self != nullptr && e > 0 && e < 0x10000) {
      // Heuristic: a small positive word is an offset rather than a pointer.
      address location = self + e;
      int index = (int)((intptr_t*)location - start);
      st->print(" (offset %d => [%d])", (int)e, index);
      if (index >= 0 && index < len)
        forward_refs[index] = i;
    }
    st->cr();
  }
}
4097
4098 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
4099 return print_vtable(nullptr, reinterpret_cast<intptr_t*>(start), len, st);
4100 }
4101
// Returns the human-readable name of the current initialization state
// (indexes the state_names table above).
const char* InstanceKlass::init_state_name() const {
  return state_names[init_state()];
}
4105
4106 void InstanceKlass::print_on(outputStream* st) const {
4107 assert(is_klass(), "must be klass");
4108 Klass::print_on(st);
4109
4110 st->print(BULLET"instance size: %d", size_helper()); st->cr();
4111 st->print(BULLET"klass size: %d", size()); st->cr();
4112 st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
4113 st->print(BULLET"flags: "); _misc_flags.print_on(st); st->cr();
4114 st->print(BULLET"state: "); st->print_cr("%s", init_state_name());
4115 st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
4116 st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
4117 st->print(BULLET"sub: ");
4118 Klass* sub = subklass();
4119 int n;
4120 for (n = 0; sub != nullptr; n++, sub = sub->next_sibling()) {
4121 if (n < MaxSubklassPrintSize) {
4122 sub->print_value_on(st);
4123 st->print(" ");
4124 }
4125 }
4126 if (n >= MaxSubklassPrintSize) st->print("(%zd more klasses...)", n - MaxSubklassPrintSize);
4127 st->cr();
4128
4129 if (is_interface()) {
4130 st->print_cr(BULLET"nof implementors: %d", nof_implementors());
4131 if (nof_implementors() == 1) {
4132 st->print_cr(BULLET"implementor: ");
4133 st->print(" ");
4134 implementor()->print_value_on(st);
4135 st->cr();
4136 }
4137 }
4138
4139 st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
4140 st->print(BULLET"methods: ");
4141 print_array_on(st, methods(), [](outputStream* ost, Method* method) {
4142 method->print_value_on(ost);
4143 });
4144 st->print(BULLET"method ordering: ");
4145 print_array_on(st, method_ordering(), [](outputStream* ost, int i) {
4146 ost->print("%d", i);
4147 });
4148 if (default_methods() != nullptr) {
4149 st->print(BULLET"default_methods: ");
4150 print_array_on(st, default_methods(), [](outputStream* ost, Method* method) {
4151 method->print_value_on(ost);
4152 });
4153 }
4154 print_on_maybe_null(st, BULLET"default vtable indices: ", default_vtable_indices());
4155 st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
4156 st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
4157
4158 st->print(BULLET"secondary supers: "); secondary_supers()->print_value_on(st); st->cr();
4159
4160 st->print(BULLET"hash_slot: %d", hash_slot()); st->cr();
4161 st->print(BULLET"secondary bitmap: " UINTX_FORMAT_X_0, _secondary_supers_bitmap); st->cr();
4162
4163 if (secondary_supers() != nullptr) {
4164 if (Verbose) {
4165 bool is_hashed = (_secondary_supers_bitmap != SECONDARY_SUPERS_BITMAP_FULL);
4166 st->print_cr(BULLET"---- secondary supers (%d words):", _secondary_supers->length());
4167 for (int i = 0; i < _secondary_supers->length(); i++) {
4168 ResourceMark rm; // for external_name()
4169 Klass* secondary_super = _secondary_supers->at(i);
4170 st->print(BULLET"%2d:", i);
4171 if (is_hashed) {
4172 int home_slot = compute_home_slot(secondary_super, _secondary_supers_bitmap);
4173 int distance = (i - home_slot) & SECONDARY_SUPERS_TABLE_MASK;
4174 st->print(" dist:%02d:", distance);
4175 }
4176 st->print_cr(" %p %s", secondary_super, secondary_super->external_name());
4177 }
4178 }
4179 }
4180 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
4181
4182 print_on_maybe_null(st, BULLET"class loader data: ", class_loader_data());
4183 print_on_maybe_null(st, BULLET"source file: ", source_file_name());
4184 if (source_debug_extension() != nullptr) {
4185 st->print(BULLET"source debug extension: ");
4186 st->print("%s", source_debug_extension());
4187 st->cr();
4188 }
4189 print_on_maybe_null(st, BULLET"class annotations: ", class_annotations());
4190 print_on_maybe_null(st, BULLET"class type annotations: ", class_type_annotations());
4191 print_on_maybe_null(st, BULLET"field annotations: ", fields_annotations());
4192 print_on_maybe_null(st, BULLET"field type annotations: ", fields_type_annotations());
4193 {
4194 bool have_pv = false;
4195 // previous versions are linked together through the InstanceKlass
4196 for (InstanceKlass* pv_node = previous_versions();
4197 pv_node != nullptr;
4198 pv_node = pv_node->previous_versions()) {
4199 if (!have_pv)
4200 st->print(BULLET"previous version: ");
4201 have_pv = true;
4202 pv_node->constants()->print_value_on(st);
4203 }
4204 if (have_pv) st->cr();
4205 }
4206
4207 print_on_maybe_null(st, BULLET"generic signature: ", generic_signature());
4208 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
4209 st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
4210 print_on_maybe_null(st, BULLET"record components: ", record_components());
4211 st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
4212 st->print(BULLET"loadable descriptors: "); loadable_descriptors()->print_value_on(st); st->cr();
4213 if (java_mirror() != nullptr) {
4214 st->print(BULLET"java mirror: ");
4215 java_mirror()->print_value_on(st);
4216 st->cr();
4217 } else {
4218 st->print_cr(BULLET"java mirror: null");
4219 }
4220 st->print(BULLET"vtable length %d (start addr: " PTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
4221 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
4222 st->print(BULLET"itable length %d (start addr: " PTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
4223 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(nullptr, start_of_itable(), itable_length(), st);
4224
4225 InstanceKlass* ik = const_cast<InstanceKlass*>(this);
4226 // There is no oop so static and nonstatic printing can use the same printer.
4227 FieldPrinter field_printer(st);
4228 st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
4229 ik->do_local_static_fields(&field_printer);
4230 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
4231 ik->print_nonstatic_fields(&field_printer);
4232
4233 st->print(BULLET"non-static oop maps (%d entries): ", nonstatic_oop_map_count());
4234 OopMapBlock* map = start_of_nonstatic_oop_maps();
4235 OopMapBlock* end_map = map + nonstatic_oop_map_count();
4236 while (map < end_map) {
4237 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
4238 map++;
4239 }
4240 st->cr();
4241
4242 if (fieldinfo_search_table() != nullptr) {
4243 st->print_cr(BULLET"---- field info search table:");
4244 FieldInfoStream::print_search_table(st, _constants, _fieldinfo_stream, _fieldinfo_search_table);
4245 }
4246 }
4247
4248 void InstanceKlass::print_value_on(outputStream* st) const {
4249 assert(is_klass(), "must be klass");
4250 if (Verbose || WizardMode) access_flags().print_on(st);
4251 name()->print_value_on(st);
4252 }
4253
4254 void FieldPrinter::do_field(fieldDescriptor* fd) {
4255 for (int i = 0; i < _indent; i++) _st->print(" ");
4256 _st->print(BULLET);
4257 // Handles the cases of static fields or instance fields but no oop is given.
4258 if (_obj == nullptr) {
4259 fd->print_on(_st, _base_offset);
4260 _st->cr();
4261 } else {
4262 fd->print_on_for(_st, _obj, _indent, _base_offset);
4263 if (!fd->field_flags().is_flat()) _st->cr();
4264 }
4265 }
4266
4267
4268 void InstanceKlass::oop_print_on(oop obj, outputStream* st, int indent, int base_offset) {
4269 Klass::oop_print_on(obj, st);
4270
4271 if (this == vmClasses::String_klass()) {
4272 typeArrayOop value = java_lang_String::value(obj);
4273 juint length = java_lang_String::length(obj);
4274 if (value != nullptr &&
4275 value->is_typeArray() &&
4276 length <= (juint) value->length()) {
4277 st->print(BULLET"string: ");
4278 java_lang_String::print(obj, st);
4279 st->cr();
4280 }
4281 }
4282
4283 st->print_cr(BULLET"---- fields (total size %zu words):", oop_size(obj));
4284 FieldPrinter print_field(st, obj, indent, base_offset);
4285 print_nonstatic_fields(&print_field);
4286
4287 if (this == vmClasses::Class_klass()) {
4288 st->print(BULLET"signature: ");
4289 java_lang_Class::print_signature(obj, st);
4290 st->cr();
4291 Klass* real_klass = java_lang_Class::as_Klass(obj);
4292 if (real_klass != nullptr && real_klass->is_instance_klass()) {
4293 st->print_cr(BULLET"---- static fields (%d):", java_lang_Class::static_oop_field_count(obj));
4294 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
4295 }
4296 } else if (this == vmClasses::MethodType_klass()) {
4297 st->print(BULLET"signature: ");
4298 java_lang_invoke_MethodType::print_signature(obj, st);
4299 st->cr();
4300 }
4301 }
4302
4303 #ifndef PRODUCT
4304
4305 bool InstanceKlass::verify_itable_index(int i) {
4306 int method_count = klassItable::method_count_for_interface(this);
4307 assert(i >= 0 && i < method_count, "index out of bounds");
4308 return true;
4309 }
4310
4311 #endif //PRODUCT
4312
// Print a one-line value description of the instance 'obj' ("a <ClassName>"
// plus its address), with extra detail for well-known JDK types: String
// contents, Class/MethodType signatures, boxed primitives, LambdaForm
// vmentry and MemberName targets.
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == vmClasses::String_klass()
      && java_lang_String::value(obj) != nullptr) {
    ResourceMark rm;
    int len = java_lang_String::length(obj);
    // Print at most 24 chars; longer strings are truncated to 12 plus "...[len]".
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == vmClasses::Class_klass()) {
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != nullptr) {
      k->print_value_on(st);
    } else {
      // Primitive-type mirrors have no Klass; print the type name instead.
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
    }
  } else if (this == vmClasses::MethodType_klass()) {
    st->print(" = ");
    java_lang_invoke_MethodType::print_signature(obj, st);
  } else if (java_lang_boxing_object::is_instance(obj)) {
    st->print(" = ");
    java_lang_boxing_object::print(obj, st);
  } else if (this == vmClasses::LambdaForm_klass()) {
    oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
    if (vmentry != nullptr) {
      st->print(" => ");
      vmentry->print_value_on(st);
    }
  } else if (this == vmClasses::MemberName_klass()) {
    Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
    if (vmtarget != nullptr) {
      st->print(" = ");
      vmtarget->print_value_on(st);
    } else {
      // Unresolved MemberName: print "<clazz>.<name>" with "null" placeholders.
      oop clazz = java_lang_invoke_MemberName::clazz(obj);
      oop name = java_lang_invoke_MemberName::name(obj);
      if (clazz != nullptr) {
        clazz->print_value_on(st);
      } else {
        st->print("null");
      }
      st->print(".");
      if (name != nullptr) {
        name->print_value_on(st);
      } else {
        st->print("null");
      }
    }
  }
}
4369
// For InstanceKlass the internal name is simply the external name;
// no separate internal form is maintained.
const char* InstanceKlass::internal_name() const {
  return external_name();
}
4373
4374 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
4375 const ModuleEntry* module_entry,
4376 const ClassFileStream* cfs) const {
4377
4378 if (ClassListWriter::is_enabled()) {
4379 ClassListWriter::write(this, cfs);
4380 }
4381
4382 print_class_load_helper(loader_data, module_entry, cfs);
4383 print_class_load_cause_logging();
4384 }
4385
// Emit the class-load log message(s): an Info-level line with the class name
// and where it was loaded from, and (at Debug level) klass/super addresses,
// interfaces, loader, and classfile length/checksum.
void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data,
                                            const ModuleEntry* module_entry,
                                            const ClassFileStream* cfs) const {

  if (!log_is_enabled(Info, class, load)) {
    return;
  }

  ResourceMark rm;
  LogMessage(class, load) msg;
  stringStream info_stream;

  // Name and class hierarchy info
  info_stream.print("%s", external_name());

  // Source
  if (cfs != nullptr) {
    if (cfs->source() != nullptr) {
      const char* module_name = (module_entry->name() == nullptr) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
      // NOTE(review): module_name looks like it can never be null here (both
      // arms of the ternary above produce a non-null string), which would make
      // the else branch below defensive/dead — confirm before removing.
      if (module_name != nullptr) {
        // When the boot loader created the stream, it didn't know the module name
        // yet. Let's format it now.
        if (cfs->from_boot_loader_modules_image()) {
          info_stream.print(" source: jrt:/%s", module_name);
        } else {
          info_stream.print(" source: %s", cfs->source());
        }
      } else {
        info_stream.print(" source: %s", cfs->source());
      }
    } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
      // Boot loader with no stream source: report the class that triggered
      // the load, if a Java caller is available.
      Thread* current = Thread::current();
      Klass* caller = current->is_Java_thread() ?
                        JavaThread::cast(current)->security_get_caller_class(1):
                        nullptr;
      // caller can be null, for example, during a JVMTI VM_Init hook
      if (caller != nullptr) {
        info_stream.print(" source: instance of %s", caller->external_name());
      } else {
        // source is unknown
      }
    } else {
      // Non-boot loader: report the loader's class as the source.
      oop class_loader = loader_data->class_loader();
      info_stream.print(" source: %s", class_loader->klass()->external_name());
    }
  } else {
    // No stream at all: the class came from the AOT cache (CDS archive).
    assert(this->in_aot_cache(), "must be");
    if (AOTMetaspace::in_aot_cache_dynamic_region((void*)this)) {
      info_stream.print(" source: shared objects file (top)");
    } else {
      info_stream.print(" source: shared objects file");
    }
  }

  msg.info("%s", info_stream.as_string());

  if (log_is_enabled(Debug, class, load)) {
    stringStream debug_stream;

    // Class hierarchy info
    debug_stream.print(" klass: " PTR_FORMAT " super: " PTR_FORMAT,
                       p2i(this), p2i(super()));

    // Interfaces
    if (local_interfaces() != nullptr && local_interfaces()->length() > 0) {
      debug_stream.print(" interfaces:");
      int length = local_interfaces()->length();
      for (int i = 0; i < length; i++) {
        debug_stream.print(" " PTR_FORMAT,
                           p2i(local_interfaces()->at(i)));
      }
    }

    // Class loader
    debug_stream.print(" loader: [");
    loader_data->print_value_on(&debug_stream);
    debug_stream.print("]");

    // Classfile checksum
    if (cfs) {
      debug_stream.print(" bytes: %d checksum: %08x",
                         cfs->length(),
                         ClassLoader::crc32(0, (const char*)cfs->buffer(),
                         cfs->length()));
    }

    msg.debug("%s", debug_stream.as_string());
  }
}
4475
// When class+load+cause logging is enabled (and this class matches the
// LogClassLoadingCauseFor filter), print the Java stack — and optionally the
// native stack — that triggered loading this class.
void InstanceKlass::print_class_load_cause_logging() const {
  bool log_cause_native = log_is_enabled(Info, class, load, cause, native);
  if (log_cause_native || log_is_enabled(Info, class, load, cause)) {
    JavaThread* current = JavaThread::current();
    ResourceMark rm(current);
    const char* name = external_name();

    // Filter: "*" matches every class; otherwise the option value must be a
    // substring of the class name.
    if (LogClassLoadingCauseFor == nullptr ||
        (strcmp("*", LogClassLoadingCauseFor) != 0 &&
         strstr(name, LogClassLoadingCauseFor) == nullptr)) {
        return;
    }

    // Log Java stack first
    {
      LogMessage(class, load, cause) msg;
      NonInterleavingLogStream info_stream{LogLevelType::Info, msg};

      info_stream.print_cr("Java stack when loading %s:", name);
      current->print_stack_on(&info_stream);
    }

    // Log native stack second
    if (log_cause_native) {
      // Log to string first so that lines can be indented
      stringStream stack_stream;
      char buf[O_BUFLEN];
      address lastpc = nullptr;
      NativeStackPrinter nsp(current);
      nsp.print_stack(&stack_stream, buf, sizeof(buf), lastpc,
                      true /* print_source_info */, -1 /* max stack */);

      LogMessage(class, load, cause, native) msg;
      NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
      info_stream.print_cr("Native stack when loading %s:", name);

      // Print each native stack line to the log, splitting the buffered text
      // on '\n' so each line can be prefixed with a tab.
      int size = (int) stack_stream.size();
      char* stack = stack_stream.as_string();
      char* stack_end = stack + size;
      char* line_start = stack;
      for (char* p = stack; p < stack_end; p++) {
        if (*p == '\n') {
          *p = '\0';
          info_stream.print_cr("\t%s", line_start);
          line_start = p + 1;
        }
      }
      // Emit a trailing line that was not '\n'-terminated, if any.
      if (line_start < stack_end) {
        info_stream.print_cr("\t%s", line_start);
      }
    }
  }
}
4530
4531 // Verification
4532
4533 class VerifyFieldClosure: public BasicOopIterateClosure {
4534 protected:
4535 template <class T> void do_oop_work(T* p) {
4536 oop obj = RawAccess<>::oop_load(p);
4537 if (!oopDesc::is_oop_or_null(obj)) {
4538 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
4539 Universe::print_on(tty);
4540 guarantee(false, "boom");
4541 }
4542 }
4543 public:
4544 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
4545 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
4546 };
4547
4548 void InstanceKlass::verify_on(outputStream* st) {
4549 #ifndef PRODUCT
4550 // Avoid redundant verifies, this really should be in product.
4551 if (_verify_count == Universe::verify_count()) return;
4552 _verify_count = Universe::verify_count();
4553 #endif
4554
4555 // Verify Klass
4556 Klass::verify_on(st);
4557
4558 // Verify that klass is present in ClassLoaderData
4559 guarantee(class_loader_data()->contains_klass(this),
4560 "this class isn't found in class loader data");
4561
4562 // Verify vtables
4563 if (is_linked()) {
4564 // $$$ This used to be done only for m/s collections. Doing it
4565 // always seemed a valid generalization. (DLD -- 6/00)
4566 vtable().verify(st);
4567 }
4568
4569 // Verify first subklass
4570 if (subklass() != nullptr) {
4571 guarantee(subklass()->is_klass(), "should be klass");
4572 }
4573
4574 // Verify siblings
4575 Klass* super = this->super();
4576 Klass* sib = next_sibling();
4577 if (sib != nullptr) {
4578 if (sib == this) {
4579 fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
4580 }
4581
4582 guarantee(sib->is_klass(), "should be klass");
4583 guarantee(sib->super() == super, "siblings should have same superklass");
4584 }
4585
4586 // Verify local interfaces
4587 if (local_interfaces()) {
4588 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
4589 for (int j = 0; j < local_interfaces->length(); j++) {
4590 InstanceKlass* e = local_interfaces->at(j);
4591 guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
4592 }
4593 }
4594
4595 // Verify transitive interfaces
4596 if (transitive_interfaces() != nullptr) {
4597 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
4598 for (int j = 0; j < transitive_interfaces->length(); j++) {
4599 InstanceKlass* e = transitive_interfaces->at(j);
4600 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
4601 }
4602 }
4603
4604 // Verify methods
4605 if (methods() != nullptr) {
4606 Array<Method*>* methods = this->methods();
4607 for (int j = 0; j < methods->length(); j++) {
4608 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4609 }
4610 for (int j = 0; j < methods->length() - 1; j++) {
4611 Method* m1 = methods->at(j);
4612 Method* m2 = methods->at(j + 1);
4613 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4614 }
4615 }
4616
4617 // Verify method ordering
4618 if (method_ordering() != nullptr) {
4619 Array<int>* method_ordering = this->method_ordering();
4620 int length = method_ordering->length();
4621 if (JvmtiExport::can_maintain_original_method_order() ||
4622 ((CDSConfig::is_using_archive() || CDSConfig::is_dumping_archive()) && length != 0)) {
4623 guarantee(length == methods()->length(), "invalid method ordering length");
4624 jlong sum = 0;
4625 for (int j = 0; j < length; j++) {
4626 int original_index = method_ordering->at(j);
4627 guarantee(original_index >= 0, "invalid method ordering index");
4628 guarantee(original_index < length, "invalid method ordering index");
4629 sum += original_index;
4630 }
4631 // Verify sum of indices 0,1,...,length-1
4632 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
4633 } else {
4634 guarantee(length == 0, "invalid method ordering length");
4635 }
4636 }
4637
4638 // Verify default methods
4639 if (default_methods() != nullptr) {
4640 Array<Method*>* methods = this->default_methods();
4641 for (int j = 0; j < methods->length(); j++) {
4642 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4643 }
4644 for (int j = 0; j < methods->length() - 1; j++) {
4645 Method* m1 = methods->at(j);
4646 Method* m2 = methods->at(j + 1);
4647 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4648 }
4649 }
4650
4651 // Verify JNI static field identifiers
4652 if (jni_ids() != nullptr) {
4653 jni_ids()->verify(this);
4654 }
4655
4656 // Verify other fields
4657 if (constants() != nullptr) {
4658 guarantee(constants()->is_constantPool(), "should be constant pool");
4659 }
4660 }
4661
4662 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
4663 Klass::oop_verify_on(obj, st);
4664 VerifyFieldClosure blk;
4665 obj->oop_iterate(&blk);
4666 }
4667
4668 // JNIid class for jfieldIDs only
4669 // Note to reviewers:
4670 // These JNI functions are just moved over to column 1 and not changed
4671 // in the compressed oops workspace.
4672 JNIid::JNIid(InstanceKlass* holder, int offset, JNIid* next) {
4673 _holder = holder;
4674 _offset = offset;
4675 _next = next;
4676 DEBUG_ONLY(_is_static_field_id = false;)
4677 }
4678
4679 JNIid* JNIid::find(int offset) {
4680 JNIid* current = this;
4681 while (current != nullptr) {
4682 if (current->offset() == offset) return current;
4683 current = current->next();
4684 }
4685 return nullptr;
4686 }
4687
4688 void JNIid::deallocate(JNIid* current) {
4689 while (current != nullptr) {
4690 JNIid* next = current->next();
4691 delete current;
4692 current = next;
4693 }
4694 }
4695
4696 void JNIid::verify(InstanceKlass* holder) {
4697 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
4698 int end_field_offset;
4699 end_field_offset = first_field_offset + (holder->static_field_size() * wordSize);
4700
4701 JNIid* current = this;
4702 while (current != nullptr) {
4703 guarantee(current->holder() == holder, "Invalid klass in JNIid");
4704 #ifdef ASSERT
4705 int o = current->offset();
4706 if (current->is_static_field_id()) {
4707 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
4708 }
4709 #endif
4710 current = current->next();
4711 }
4712 }
4713
// Advance the class initialization state. Debug builds check the transition:
// states normally only move forward (AOT-cached classes may re-enter the same
// state); resetting to 'allocated' is always permitted.
void InstanceKlass::set_init_state(ClassState state) {
#ifdef ASSERT
  bool good_state = in_aot_cache() ? (_init_state <= state)
                                   : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
#endif
  assert(_init_thread == nullptr, "should be cleared before state change");
  // Release store: threads that observe the new state also observe all
  // writes performed before this transition.
  AtomicAccess::release_store(&_init_state, state);
}
4723
4724 #if INCLUDE_JVMTI
4725
4726 // RedefineClasses() support for previous versions
4727
4728 // Globally, there is at least one previous version of a class to walk
4729 // during class unloading, which is saved because old methods in the class
4730 // are still running. Otherwise the previous version list is cleaned up.
// Read-and-cleared by should_clean_previous_versions_and_reset(); set again by
// purge_previous_version_list()/add_previous_version() while live, non-shared
// previous versions remain.
bool InstanceKlass::_should_clean_previous_versions = false;
4732
4733 // Returns true if there are previous versions of a class for class
4734 // unloading only. Also resets the flag to false. purge_previous_version
4735 // will set the flag to true if there are any left, i.e., if there's any
4736 // work to do for next time. This is to avoid the expensive code cache
4737 // walk in CLDG::clean_deallocate_lists().
4738 bool InstanceKlass::should_clean_previous_versions_and_reset() {
4739 bool ret = _should_clean_previous_versions;
4740 log_trace(redefine, class, iklass, purge)("Class unloading: should_clean_previous_versions = %s",
4741 ret ? "true" : "false");
4742 _should_clean_previous_versions = false;
4743 return ret;
4744 }
4745
4746 // This nulls out the jmethodID for all obsolete methods in the previous version of the 'klass'.
4747 // These obsolete methods only exist in the previous version and we're about to delete the memory for them.
4748 // The jmethodID for these are deallocated when we unload the class, so this doesn't remove them from the table.
4749 void InstanceKlass::clear_obsolete_jmethod_ids(InstanceKlass* klass) {
4750 Array<Method*>* method_refs = klass->methods();
4751 for (int k = 0; k < method_refs->length(); k++) {
4752 Method* method = method_refs->at(k);
4753 // Only need to clear obsolete methods.
4754 if (method != nullptr && method->is_obsolete()) {
4755 method->clear_jmethod_id();
4756 }
4757 }
4758 }
4759
4760 // Purge previous versions before adding new previous versions of the class and
4761 // during class unloading.
// Purge previous versions before adding new previous versions of the class and
// during class unloading. A previous version whose constant pool is no longer
// on any stack is unlinked from the list and freed immediately; versions that
// are still running (or are AOT-shared) are kept.
void InstanceKlass::purge_previous_version_list() {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(has_been_redefined(), "Should only be called for main class");

  // Quick exit.
  if (previous_versions() == nullptr) {
    return;
  }

  // This klass has previous versions so see what we can cleanup
  // while it is safe to do so.

  int deleted_count = 0;    // leave debugging breadcrumbs
  int live_count = 0;
  ClassLoaderData* loader_data = class_loader_data();
  assert(loader_data != nullptr, "should never be null");

  ResourceMark rm;
  log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());

  // previous versions are linked together through the InstanceKlass
  InstanceKlass* pv_node = previous_versions();
  InstanceKlass* last = this;   // trailing node, so we can relink around pv_node
  int version = 0;

  // check the previous versions list
  for (; pv_node != nullptr; ) {

    ConstantPool* pvcp = pv_node->constants();
    assert(pvcp != nullptr, "cp ref was unexpectedly cleared");

    if (!pvcp->on_stack()) {
      // If the constant pool isn't on stack, none of the methods
      // are executing.  Unlink this previous_version.
      // The previous version InstanceKlass is on the ClassLoaderData deallocate list
      // so will be deallocated during the next phase of class unloading.
      log_trace(redefine, class, iklass, purge)
        ("previous version " PTR_FORMAT " is dead.", p2i(pv_node));
      // Unlink from previous version list.
      assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
      InstanceKlass* next = pv_node->previous_versions();
      clear_obsolete_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class
      pv_node->link_previous_versions(nullptr);   // point next to null
      last->link_previous_versions(next);
      // Delete this node directly. Nothing is referring to it and we don't
      // want it to increase the counter for metadata to delete in CLDG.
      MetadataFactory::free_metadata(loader_data, pv_node);
      pv_node = next;
      deleted_count++;
      version++;
      continue;
    } else {
      // The constant pool is on a stack: this version must be preserved.
      assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder");
      guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
      live_count++;
      if (pvcp->in_aot_cache()) {
        // Shared previous versions can never be removed so no cleaning is needed.
        log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node));
      } else {
        // Previous version alive, set that clean is needed for next time.
        _should_clean_previous_versions = true;
        log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is alive", p2i(pv_node));
      }
    }

    // next previous version
    last = pv_node;
    pv_node = pv_node->previous_versions();
    version++;
  }
  log_trace(redefine, class, iklass, purge)
    ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
}
4835
// For each method of 'old_methods' made obsolete by the current redefinition,
// mark the matching EMCP (Equivalent Modulo Constant Pool) method in every
// remaining previous version obsolete as well. Only needed when the
// redefinition produced a mix of obsolete and EMCP methods.
void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != nullptr) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;   // obsolete methods processed so far, for early exit
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != nullptr;
             prev_version = prev_version->previous_versions(), j++) {

          // Look for a not-yet-obsolete method with the same name+signature
          // in this previous version.
          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              log_trace(redefine, class, iklass, add)
                ("%s(%s): flush obsolete method @%d in version @%d",
                 m_name->as_C_string(), m_signature->as_C_string(), k, j);

              method->set_is_obsolete();
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}
4892
4893 // Save the scratch_class as the previous version if any of the methods are running.
4894 // The previous_versions are used to set breakpoints in EMCP methods and they are
4895 // also used to clean MethodData links to redefined methods that are no longer running.
// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions for this class
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    // Defer deallocation to the class loader data's deallocate list.
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
    return;
  }

  // Add previous version if any methods are still running or if this is
  // a shared class which should never be removed.
  assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version");
  // Link the scratch class at the head of this class's previous-version list.
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class);
  if (cp_ref->in_aot_cache()) {
    log_trace(redefine, class, iklass, add) ("scratch class added; class is shared");
  } else {
    //  We only set clean_previous_versions flag for processing during class
    // unloading for non-shared classes.
    _should_clean_previous_versions = true;
    log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
  }
} // end add_previous_version()
4938
4939 #endif // INCLUDE_JVMTI
4940
4941 Method* InstanceKlass::method_with_idnum(int idnum) const {
4942 Method* m = nullptr;
4943 if (idnum < methods()->length()) {
4944 m = methods()->at(idnum);
4945 }
4946 if (m == nullptr || m->method_idnum() != idnum) {
4947 for (int index = 0; index < methods()->length(); ++index) {
4948 m = methods()->at(index);
4949 if (m->method_idnum() == idnum) {
4950 return m;
4951 }
4952 }
4953 // None found, return null for the caller to handle.
4954 return nullptr;
4955 }
4956 return m;
4957 }
4958
4959
4960 Method* InstanceKlass::method_with_orig_idnum(int idnum) const {
4961 if (idnum >= methods()->length()) {
4962 return nullptr;
4963 }
4964 Method* m = methods()->at(idnum);
4965 if (m != nullptr && m->orig_method_idnum() == idnum) {
4966 return m;
4967 }
4968 // Obsolete method idnum does not match the original idnum
4969 for (int index = 0; index < methods()->length(); ++index) {
4970 m = methods()->at(index);
4971 if (m->orig_method_idnum() == idnum) {
4972 return m;
4973 }
4974 }
4975 // None found, return null for the caller to handle.
4976 return nullptr;
4977 }
4978
4979
4980 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) const {
4981 const InstanceKlass* holder = get_klass_version(version);
4982 if (holder == nullptr) {
4983 return nullptr; // The version of klass is gone, no method is found
4984 }
4985 return holder->method_with_orig_idnum(idnum);
4986 }
4987
4988 #if INCLUDE_JVMTI
// Return the cached class file data recorded for JVMTI retransformation
// support (may be null when nothing was cached).
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
  return _cached_class_file;
}
4992
// Length in bytes of the cached class file; delegates to VM_RedefineClasses.
jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}
4996
// Raw byte buffer of the cached class file; delegates to VM_RedefineClasses.
unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}
5000 #endif
5001
5002 // Make a step iterating over the class hierarchy under the root class.
5003 // Skips subclasses if requested.
// Make a step iterating over the class hierarchy under the root class
// (depth-first: descend to the first subclass, else advance to the next
// sibling, backtracking through super links when a subtree is exhausted).
// Skips subclasses if requested via _visit_subclasses.
void ClassHierarchyIterator::next() {
  assert(_current != nullptr, "required");
  if (_visit_subclasses && _current->subklass() != nullptr) {
    _current = _current->subklass();
    return; // visit next subclass
  }
  _visit_subclasses = true; // reset
  while (_current->next_sibling() == nullptr && _current != _root) {
    _current = _current->java_super(); // backtrack; no more sibling subclasses left
  }
  if (_current == _root) {
    // Iteration is over (back at root after backtracking). Invalidate the iterator.
    _current = nullptr;
    return;
  }
  _current = _current->next_sibling();
  return; // visit next sibling subclass
}