1 /*
2 * Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotClassInitializer.hpp"
26 #include "cds/aotLinkedClassBulkLoader.hpp"
27 #include "cds/aotMetaspace.hpp"
28 #include "cds/archiveUtils.hpp"
29 #include "cds/cdsConfig.hpp"
30 #include "cds/cdsEnumKlass.hpp"
31 #include "cds/classListWriter.hpp"
32 #include "cds/heapShared.hpp"
33 #include "classfile/classFileParser.hpp"
34 #include "classfile/classFileStream.hpp"
35 #include "classfile/classLoader.hpp"
36 #include "classfile/classLoaderData.inline.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "classfile/moduleEntry.hpp"
39 #include "classfile/systemDictionary.hpp"
40 #include "classfile/systemDictionaryShared.hpp"
41 #include "classfile/verifier.hpp"
42 #include "classfile/vmClasses.hpp"
43 #include "classfile/vmSymbols.hpp"
44 #include "code/codeCache.hpp"
45 #include "code/dependencyContext.hpp"
46 #include "compiler/compilationPolicy.hpp"
47 #include "compiler/compileBroker.hpp"
48 #include "gc/shared/collectedHeap.inline.hpp"
49 #include "interpreter/bytecodeStream.hpp"
50 #include "interpreter/oopMapCache.hpp"
51 #include "interpreter/rewriter.hpp"
52 #include "jvm.h"
53 #include "jvmtifiles/jvmti.h"
54 #include "klass.inline.hpp"
55 #include "logging/log.hpp"
56 #include "logging/logMessage.hpp"
57 #include "logging/logStream.hpp"
58 #include "memory/allocation.inline.hpp"
59 #include "memory/iterator.inline.hpp"
60 #include "memory/metadataFactory.hpp"
61 #include "memory/metaspaceClosure.hpp"
62 #include "memory/oopFactory.hpp"
63 #include "memory/resourceArea.hpp"
64 #include "memory/universe.hpp"
65 #include "oops/constantPool.hpp"
66 #include "oops/fieldStreams.inline.hpp"
67 #include "oops/inlineKlass.hpp"
68 #include "oops/instanceClassLoaderKlass.hpp"
69 #include "oops/instanceKlass.inline.hpp"
70 #include "oops/instanceMirrorKlass.hpp"
71 #include "oops/instanceOop.hpp"
72 #include "oops/instanceStackChunkKlass.hpp"
73 #include "oops/klass.inline.hpp"
74 #include "oops/layoutKind.hpp"
75 #include "oops/markWord.hpp"
76 #include "oops/method.hpp"
77 #include "oops/oop.inline.hpp"
78 #include "oops/recordComponent.hpp"
79 #include "oops/refArrayKlass.hpp"
80 #include "oops/symbol.hpp"
81 #include "prims/jvmtiExport.hpp"
82 #include "prims/jvmtiRedefineClasses.hpp"
83 #include "prims/jvmtiThreadState.hpp"
84 #include "prims/methodComparator.hpp"
85 #include "runtime/arguments.hpp"
86 #include "runtime/atomicAccess.hpp"
87 #include "runtime/deoptimization.hpp"
88 #include "runtime/fieldDescriptor.inline.hpp"
89 #include "runtime/handles.inline.hpp"
90 #include "runtime/javaCalls.hpp"
91 #include "runtime/javaThread.inline.hpp"
92 #include "runtime/mutexLocker.hpp"
93 #include "runtime/orderAccess.hpp"
94 #include "runtime/os.inline.hpp"
95 #include "runtime/reflection.hpp"
96 #include "runtime/synchronizer.hpp"
97 #include "runtime/threads.hpp"
98 #include "services/classLoadingService.hpp"
99 #include "services/finalizerService.hpp"
100 #include "services/threadService.hpp"
101 #include "utilities/dtrace.hpp"
102 #include "utilities/events.hpp"
103 #include "utilities/macros.hpp"
104 #include "utilities/nativeStackPrinter.hpp"
105 #include "utilities/ostream.hpp"
106 #include "utilities/stringUtils.hpp"
107 #ifdef COMPILER1
108 #include "c1/c1_Compiler.hpp"
109 #endif
110 #if INCLUDE_JFR
111 #include "jfr/jfrEvents.hpp"
112 #endif
113
#ifdef DTRACE_ENABLED


// The DTRACE_CLASSINIT_PROBE* macros are invoked with lower-case probe-name
// suffixes; map each onto the corresponding generated upper-case USDT probe.
#define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
#define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
#define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
#define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
#define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
#define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
#define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
#define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
// Fire a class-initialization probe of the given 'type'. Passes the class
// name bytes (may be null) and length, the class loader oop and the thread
// type. Intended for use inside InstanceKlass member functions (calls name()
// and class_loader()).
#define DTRACE_CLASSINIT_PROBE(type, thread_type)                \
  {                                                              \
    char* data = nullptr;                                        \
    int len = 0;                                                 \
    Symbol* clss_name = name();                                  \
    if (clss_name != nullptr) {                                  \
      data = (char*)clss_name->bytes();                          \
      len = clss_name->utf8_length();                            \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void*)class_loader(), thread_type);            \
  }

// Same as DTRACE_CLASSINIT_PROBE, with an extra 'wait' argument — presumably
// whether the thread had to wait on another initializer (see call sites to
// confirm).
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)     \
  {                                                              \
    char* data = nullptr;                                        \
    int len = 0;                                                 \
    Symbol* clss_name = name();                                  \
    if (clss_name != nullptr) {                                  \
      data = (char*)clss_name->bytes();                          \
      len = clss_name->utf8_length();                            \
    }                                                            \
    HOTSPOT_CLASS_INITIALIZATION_##type(                         \
      data, len, (void*)class_loader(), thread_type, wait);      \
  }

#else //  ndef DTRACE_ENABLED

// No-op probes when DTrace support is compiled out.
#define DTRACE_CLASSINIT_PROBE(type, thread_type)
#define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)

#endif //  ndef DTRACE_ENABLED
157
158 void InlineLayoutInfo::metaspace_pointers_do(MetaspaceClosure* it) {
159 log_trace(cds)("Iter(InlineFieldInfo): %p", this);
160 it->push(&_klass);
161 }
162
// Convenience wrapper: dump this layout info to the default tty stream.
void InlineLayoutInfo::print() const {
  print_on(tty);
}
166
// Print a human-readable dump of this layout info to 'st': the flattened
// field's klass (with a nested, indented klass dump when non-null), the
// layout kind, and the null-marker offset.
void InlineLayoutInfo::print_on(outputStream* st) const {
  st->print_cr("_klass: " PTR_FORMAT, p2i(_klass));
  if (_klass != nullptr) {
    StreamIndentor si(st);  // indent the nested klass dump under the header line
    _klass->print_on(st);
    st->cr();
  }

  st->print("_layout: ");
  LayoutKindHelper::print_on(_kind, st);
  st->cr();

  st->print("_null_marker_offset: %d", _null_marker_offset);
}
181
182 // A value class is considered naturally atomic if its layout,
183 // once all fields flattening have been applied, contains a single primitive
184 // or oop field. Because primitive types and oops are already handled
185 // atomically by the JVM, it means that there's no need to take
186 // special precautions when reading or writing this value to guarantee
187 // cross-fields invariants. Nullability has to be taken into consideration,
188 // as the null-marker has to be considered as a pseudo-field which must
189 // be kept consistent with the payload. The only kind of value class
190 // that can be considered naturally atomic when nullable is the empty
191 // value classes because the dummy field is re-used as a null-marker.
192 bool InstanceKlass::is_naturally_atomic(bool null_free) const {
193 assert(!is_identity_class(), "Doesn't have sense for an identity class");
194 if (null_free) {
195 // No extra null-marker, just check the layout of the fields
196 return _misc_flags.is_naturally_atomic();
197 } else {
198 // Requires a null-marker, can't have any other fields
199 return InlineKlass::cast(this)->is_empty_inline_type();
200 }
201 }
202
// Global switch for finalizer registration; defaults to enabled. Toggled
// elsewhere (not visible in this chunk) — TODO confirm the setter's caller.
bool InstanceKlass::_finalization_enabled = true;
static int call_class_initializer_counter = 0;   // for debugging
205
206 static inline bool is_class_loader(const Symbol* class_name,
207 const ClassFileParser& parser) {
208 assert(class_name != nullptr, "invariant");
209
210 if (class_name == vmSymbols::java_lang_ClassLoader()) {
211 return true;
212 }
213
214 if (vmClasses::ClassLoader_klass_is_loaded()) {
215 const Klass* const super_klass = parser.super_klass();
216 if (super_klass != nullptr) {
217 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
218 return true;
219 }
220 }
221 }
222 return false;
223 }
224
// Returns true if the field at 'index' carries the null-free inline-type flag
// in its field flags.
bool InstanceKlass::field_is_null_free_inline_type(int index) const {
  return field(index).field_flags().is_null_free_inline_type();
}
228
229 bool InstanceKlass::is_class_in_loadable_descriptors_attribute(Symbol* name) const {
230 if (_loadable_descriptors == nullptr) return false;
231 for (int i = 0; i < _loadable_descriptors->length(); i++) {
232 Symbol* class_name = _constants->symbol_at(_loadable_descriptors->at(i));
233 if (class_name == name) return true;
234 }
235 return false;
236 }
237
238 static inline bool is_stack_chunk_class(const Symbol* class_name,
239 const ClassLoaderData* loader_data) {
240 return (class_name == vmSymbols::jdk_internal_vm_StackChunk() &&
241 loader_data->is_the_null_class_loader_data());
242 }
243
244 // private: called to verify that k is a static member of this nest.
245 // We know that k is an instance class in the same package and hence the
246 // same classloader.
// Returns true if 'k' is named in this class's NestMembers attribute.
// Purely a name comparison against constant-pool entries: no class is
// resolved or loaded here.
bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
  assert(!is_hidden(), "unexpected hidden class");
  // No NestMembers attribute (null or the shared empty array) means this
  // class is not a nest-host, so 'k' cannot be one of its members.
  if (_nest_members == nullptr || _nest_members == Universe::the_empty_short_array()) {
    if (log_is_enabled(Trace, class, nestmates)) {
      ResourceMark rm(current);
      log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
                                  k->external_name(), this->external_name());
    }
    return false;
  }

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm(current);
    log_trace(class, nestmates)("Checking nest membership of %s in %s",
                                k->external_name(), this->external_name());
  }

  // Check for the named class in _nest_members.
  // We don't resolve, or load, any classes.
  for (int i = 0; i < _nest_members->length(); i++) {
    int cp_index = _nest_members->at(i);
    Symbol* name = _constants->klass_name_at(cp_index);
    if (name == k->name()) {
      log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
      return true;
    }
  }
  log_trace(class, nestmates)("- class is NOT a nest member!");
  return false;
}
277
278 // Called to verify that k is a permitted subclass of this class.
279 // The incoming stringStream is used to format the messages for error logging and for the caller
280 // to use for exception throwing.
// Returns true if 'k' is a legal subclass of this sealed class: same module,
// (for non-public k) same package, and named in the PermittedSubclasses
// attribute. On failure, a diagnostic is formatted into 'ss' for the caller
// to use when throwing.
bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k, stringStream& ss) const {
  Thread* current = Thread::current();
  assert(k != nullptr, "sanity check");
  assert(_permitted_subclasses != nullptr && _permitted_subclasses != Universe::the_empty_short_array(),
         "unexpected empty _permitted_subclasses array");

  if (log_is_enabled(Trace, class, sealed)) {
    ResourceMark rm(current);
    log_trace(class, sealed)("Checking for permitted subclass %s in %s",
                             k->external_name(), this->external_name());
  }

  // Check that the class and its super are in the same module.
  if (k->module() != this->module()) {
    ss.print("Failed same module check: subclass %s is in module '%s' with loader %s, "
             "and sealed class %s is in module '%s' with loader %s",
             k->external_name(),
             k->module()->name_as_C_string(),
             k->module()->loader_data()->loader_name_and_id(),
             this->external_name(),
             this->module()->name_as_C_string(),
             this->module()->loader_data()->loader_name_and_id());
    log_trace(class, sealed)(" - %s", ss.as_string());
    return false;
  }

  // A non-public subclass must additionally share the sealed class's package.
  if (!k->is_public() && !is_same_class_package(k)) {
    ss.print("Failed same package check: non-public subclass %s is in package '%s' with classloader %s, "
             "and sealed class %s is in package '%s' with classloader %s",
             k->external_name(),
             k->package() != nullptr ? k->package()->name()->as_C_string() : "unnamed",
             k->module()->loader_data()->loader_name_and_id(),
             this->external_name(),
             this->package() != nullptr ? this->package()->name()->as_C_string() : "unnamed",
             this->module()->loader_data()->loader_name_and_id());
    log_trace(class, sealed)(" - %s", ss.as_string());
    return false;
  }

  // Name-only scan of the PermittedSubclasses attribute; nothing is resolved.
  for (int i = 0; i < _permitted_subclasses->length(); i++) {
    int cp_index = _permitted_subclasses->at(i);
    Symbol* name = _constants->klass_name_at(cp_index);
    if (name == k->name()) {
      log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
      return true;
    }
  }

  ss.print("Failed listed permitted subclass check: class %s is not a permitted subclass of %s",
           k->external_name(), this->external_name());
  log_trace(class, sealed)(" - %s", ss.as_string());
  return false;
}
334
335 // Return nest-host class, resolving, validating and saving it if needed.
336 // In cases where this is called from a thread that cannot do classloading
337 // (such as a native JIT thread) then we simply return null, which in turn
338 // causes the access check to return false. Such code will retry the access
339 // from a more suitable environment later. Otherwise the _nest_host is always
340 // set once this method returns.
341 // Any errors from nest-host resolution must be preserved so they can be queried
342 // from higher-level access checking code, and reported as part of access checking
343 // exceptions.
344 // VirtualMachineErrors are propagated with a null return.
345 // Under any conditions where the _nest_host can be set to non-null the resulting
346 // value of it and, if applicable, the nest host resolution/validation error,
347 // are idempotent.
InstanceKlass* InstanceKlass::nest_host(TRAPS) {
  InstanceKlass* nest_host_k = _nest_host;
  if (nest_host_k != nullptr) {
    return nest_host_k;  // already resolved (possibly to 'this')
  }

  ResourceMark rm(THREAD);

  // need to resolve and save our nest-host class.
  if (_nest_host_index != 0) { // we have a real nest_host
    // Before trying to resolve check if we're in a suitable context
    bool can_resolve = THREAD->can_call_java();
    if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
      log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
                                  this->external_name());
      return nullptr; // sentinel to say "try again from a different context"
    }

    log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
                                this->external_name(),
                                _constants->klass_name_at(_nest_host_index)->as_C_string());

    Klass* k = _constants->klass_at(_nest_host_index, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
        return nullptr; // propagate VMEs
      }
      // Record the resolution failure with the SystemDictionary so later
      // access checks can report it, then clear the exception and fall
      // through to set the nest-host to 'this'.
      stringStream ss;
      char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
      ss.print("Nest host resolution of %s with host %s failed: ",
               this->external_name(), target_host_class);
      java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
      constantPoolHandle cph(THREAD, constants());
      SystemDictionary::add_nest_host_error(cph, _nest_host_index, ss);
      CLEAR_PENDING_EXCEPTION;

      log_trace(class, nestmates)("%s", ss.base());
    } else {
      // A valid nest-host is an instance class in the current package that lists this
      // class as a nest member. If any of these conditions are not met the class is
      // its own nest-host.
      const char* error = nullptr;

      // JVMS 5.4.4 indicates package check comes first
      if (is_same_class_package(k)) {
        // Now check actual membership. We can't be a member if our "host" is
        // not an instance class.
        if (k->is_instance_klass()) {
          nest_host_k = InstanceKlass::cast(k);
          bool is_member = nest_host_k->has_nest_member(THREAD, this);
          if (is_member) {
            _nest_host = nest_host_k; // save resolved nest-host value

            log_trace(class, nestmates)("Resolved nest-host of %s to %s",
                                        this->external_name(), k->external_name());
            return nest_host_k;
          } else {
            error = "current type is not listed as a nest member";
          }
        } else {
          error = "host is not an instance class";
        }
      } else {
        error = "types are in different packages";
      }

      // something went wrong, so record what and log it
      {
        stringStream ss;
        ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
                 this->external_name(),
                 this->class_loader_data()->loader_name_and_id(),
                 k->external_name(),
                 k->class_loader_data()->loader_name_and_id(),
                 error);
        constantPoolHandle cph(THREAD, constants());
        SystemDictionary::add_nest_host_error(cph, _nest_host_index, ss);
        log_trace(class, nestmates)("%s", ss.base());
      }
    }
  } else {
    log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
                                this->external_name());
  }

  // Either not in an explicit nest, or else an error occurred, so
  // the nest-host is set to `this`. Any thread that sees this assignment
  // will also see any setting of nest_host_error(), if applicable.
  return (_nest_host = this);
}
438
439 // Dynamic nest member support: set this class's nest host to the given class.
440 // This occurs as part of the class definition, as soon as the instanceKlass
441 // has been created and doesn't require further resolution. The code:
442 // lookup().defineHiddenClass(bytes_for_X, NESTMATE);
443 // results in:
444 // class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
445 // If it has an explicit _nest_host_index or _nest_members, these will be ignored.
446 // We also know the "host" is a valid nest-host in the same package so we can
447 // assert some of those facts.
// Install 'host' as this hidden class's nest-host, bypassing the normal
// constant-pool resolution path (see comment above). Also records a
// class-loader dependency so the host outlives this class.
void InstanceKlass::set_nest_host(InstanceKlass* host) {
  assert(is_hidden(), "must be a hidden class");
  assert(host != nullptr, "null nest host specified");
  assert(_nest_host == nullptr, "current class has resolved nest-host");
  assert(nest_host_error() == nullptr, "unexpected nest host resolution error exists: %s",
         nest_host_error());
  // The proposed host must be either unresolved-with-no-attribute or
  // self-hosting; anything else is not a valid nest-host.
  assert((host->_nest_host == nullptr && host->_nest_host_index == 0) ||
         (host->_nest_host == host), "proposed host is not a valid nest-host");
  // Can't assert this as package is not set yet:
  // assert(is_same_class_package(host), "proposed host is in wrong package");

  if (log_is_enabled(Trace, class, nestmates)) {
    ResourceMark rm;
    const char* msg = "";
    // a hidden class does not expect a statically defined nest-host
    if (_nest_host_index > 0) {
      msg = "(the NestHost attribute in the current class is ignored)";
    } else if (_nest_members != nullptr && _nest_members != Universe::the_empty_short_array()) {
      msg = "(the NestMembers attribute in the current class is ignored)";
    }
    log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
                                this->external_name(),
                                host->external_name(),
                                msg);
  }
  // set dynamic nest host
  _nest_host = host;
  // Record dependency to keep nest host from being unloaded before this class.
  ClassLoaderData* this_key = class_loader_data();
  assert(this_key != nullptr, "sanity");
  this_key->record_dependency(host);
}
480
481 // check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
482 // or we are k's nest_host - all of which is covered by comparing the two
483 // resolved_nest_hosts.
484 // Any exceptions (i.e. VMEs) are propagated.
bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {

  assert(this != k, "this should be handled by higher-level code");

  // Per JVMS 5.4.4 we first resolve and validate the current class, then
  // the target class k.

  // A null return means "couldn't resolve in this context" (no exception);
  // treat it as no access — the caller may retry later.
  InstanceKlass* cur_host = nest_host(CHECK_false);
  if (cur_host == nullptr) {
    return false;
  }

  Klass* k_nest_host = k->nest_host(CHECK_false);
  if (k_nest_host == nullptr) {
    return false;
  }

  // Same resolved nest-host covers nestmates, k being our host, and us
  // being k's host.
  bool access = (cur_host == k_nest_host);

  ResourceMark rm(THREAD);
  log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
                              this->external_name(),
                              access ? "" : "NOT ",
                              k->external_name());
  return access;
}
511
512 const char* InstanceKlass::nest_host_error() {
513 if (_nest_host_index == 0) {
514 return nullptr;
515 } else {
516 constantPoolHandle cph(Thread::current(), constants());
517 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
518 }
519 }
520
521 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
522 const int size = InstanceKlass::size(parser.vtable_size(),
523 parser.itable_size(),
524 nonstatic_oop_map_size(parser.total_oop_map_count()),
525 parser.is_interface(),
526 parser.is_inline_type());
527
528 const Symbol* const class_name = parser.class_name();
529 assert(class_name != nullptr, "invariant");
530 ClassLoaderData* loader_data = parser.loader_data();
531 assert(loader_data != nullptr, "invariant");
532
533 InstanceKlass* ik;
534
535 // Allocation
536 if (parser.is_instance_ref_klass()) {
537 // java.lang.ref.Reference
538 ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
539 } else if (class_name == vmSymbols::java_lang_Class()) {
540 // mirror - java.lang.Class
541 ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
542 } else if (is_stack_chunk_class(class_name, loader_data)) {
543 // stack chunk
544 ik = new (loader_data, size, THREAD) InstanceStackChunkKlass(parser);
545 } else if (is_class_loader(class_name, parser)) {
546 // class loader - java.lang.ClassLoader
547 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
548 } else if (parser.is_inline_type()) {
549 // inline type
550 ik = new (loader_data, size, THREAD) InlineKlass(parser);
551 } else {
552 // normal
553 ik = new (loader_data, size, THREAD) InstanceKlass(parser);
554 }
555
556 assert(ik == nullptr || CompressedKlassPointers::is_encodable(ik),
557 "Klass " PTR_FORMAT "needs a narrow Klass ID, but is not encodable", p2i(ik));
558
559 // Check for pending exception before adding to the loader data and incrementing
560 // class count. Can get OOM here.
561 if (HAS_PENDING_EXCEPTION) {
562 return nullptr;
563 }
564
565 #ifdef ASSERT
566 ik->bounds_check((address) ik->start_of_vtable(), false, size);
567 ik->bounds_check((address) ik->start_of_itable(), false, size);
568 ik->bounds_check((address) ik->end_of_itable(), true, size);
569 ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size);
570 #endif //ASSERT
571 return ik;
572 }
573
#ifndef PRODUCT
// Debug-only helper: verify that 'addr' lies within this klass's allocation.
// 'edge_ok' permits addr to land exactly on either boundary. A negative
// 'size_in_bytes' means "use size()". Note that the extent is scaled by
// sizeof(intptr_t), i.e. the size value is treated as words despite the
// parameter name — TODO confirm with the callers.
bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const {
  const char* bad = nullptr;
  address end = nullptr;
  if (addr < (address)this) {
    bad = "before";
  } else if (addr == (address)this) {
    if (edge_ok) return true;
    bad = "just before";
  } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) {
    // 'end' is computed as a side effect of the comparison above, so it is
    // only valid from this branch onward.
    if (edge_ok) return true;
    bad = "just after";
  } else if (addr > end) {
    bad = "after";
  } else {
    return true;
  }
  // Out of bounds: report, then dump the klass in maximum verbosity.
  tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]",
                bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end);
  Verbose = WizardMode = true; this->print(); //@@
  return false;
}
#endif //PRODUCT
597
598 // copy method ordering from resource area to Metaspace
599 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
600 if (m != nullptr) {
601 // allocate a new array and copy contents (memcpy?)
602 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
603 for (int i = 0; i < m->length(); i++) {
604 _method_ordering->at_put(i, m->at(i));
605 }
606 } else {
607 _method_ordering = Universe::the_empty_int_array();
608 }
609 }
610
// create a new array of vtable_indices for default methods
// Allocates a Metaspace int array of length 'len', installs it via
// set_default_vtable_indices() and returns it. Must only be called once per
// class; returns null (via CHECK_NULL) on allocation failure.
Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
  Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
  assert(default_vtable_indices() == nullptr, "only create once");
  set_default_vtable_indices(vtable_indices);
  return vtable_indices;
}
618
619
// Default constructor: only used by the CDS/AOT machinery when dumping or
// materializing archived klasses; fields are filled in by that machinery.
InstanceKlass::InstanceKlass() {
  assert(CDSConfig::is_dumping_static_archive() || CDSConfig::is_using_archive(), "only for CDS");
}
623
// Main constructor used during class file parsing. Only a subset of fields is
// initialized here; members not listed rely on the underlying allocation
// being zeroed (see the _methods assert below) and are filled in later by the
// parser.
InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, markWord prototype_header, ReferenceType reference_type) :
  Klass(kind, prototype_header),
  _nest_members(nullptr),
  _nest_host(nullptr),
  _permitted_subclasses(nullptr),
  _record_components(nullptr),
  _static_field_size(parser.static_field_size()),
  _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
  _itable_len(parser.itable_size()),
  _nest_host_index(0),
  _init_state(allocated),
  _reference_type(reference_type),
  _acmp_maps_offset(0),
  _init_thread(nullptr),
  _inline_layout_info_array(nullptr),
  _loadable_descriptors(nullptr),
  _acmp_maps_array(nullptr),
  _adr_inline_klass_members(nullptr)
{
  set_vtable_length(parser.vtable_size());
  set_access_flags(parser.access_flags());
  if (parser.is_hidden()) set_is_hidden();
  // Second argument: not an array class.
  set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
                                                  false));
  if (parser.has_inlined_fields()) {
    set_has_inlined_fields();
  }

  assert(nullptr == _methods, "underlying memory not zeroed?");
  assert(is_instance_klass(), "is layout incorrect?");
  assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
}
656
657 void InstanceKlass::set_is_cloneable() {
658 if (name() == vmSymbols::java_lang_invoke_MemberName()) {
659 assert(is_final(), "no subclasses allowed");
660 // MemberName cloning should not be intrinsified and always happen in JVM_Clone.
661 } else if (reference_type() != REF_NONE) {
662 // Reference cloning should not be intrinsified and always happen in JVM_Clone.
663 } else {
664 set_is_cloneable_fast();
665 }
666 }
667
// Free the given methods array and its entries. Skips the shared empty
// sentinel and AOT-archived arrays, which are not heap-owned.
void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
                                       Array<Method*>* methods) {
  if (methods != nullptr && methods != Universe::the_empty_method_array() &&
      !methods->in_aot_cache()) {
    for (int i = 0; i < methods->length(); i++) {
      Method* method = methods->at(i);
      if (method == nullptr) continue; // maybe null if error processing
      // Only want to delete methods that are not executing for RedefineClasses.
      // The previous version will point to them so they're not totally dangling
      assert (!method->on_stack(), "shouldn't be called with methods on stack");
      MetadataFactory::free_metadata(loader_data, method);
    }
    MetadataFactory::free_array<Method*>(loader_data, methods);
  }
}
683
// Free the local and transitive interface arrays, carefully avoiding arrays
// that are shared (empty sentinel, super's transitive list, aliasing between
// the two lists) or AOT-archived.
void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
                                          const InstanceKlass* super_klass,
                                          Array<InstanceKlass*>* local_interfaces,
                                          Array<InstanceKlass*>* transitive_interfaces) {
  // Only deallocate transitive interfaces if not empty, same as super class
  // or same as local interfaces. See code in parseClassFile.
  Array<InstanceKlass*>* ti = transitive_interfaces;
  if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
    // check that the interfaces don't come from super class
    Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
                                 super_klass->transitive_interfaces();
    if (ti != sti && ti != nullptr && !ti->in_aot_cache()) {
      MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
    }
  }

  // local interfaces can be empty
  if (local_interfaces != Universe::the_empty_instance_klass_array() &&
      local_interfaces != nullptr && !local_interfaces->in_aot_cache()) {
    MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
  }
}
706
707 void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
708 Array<RecordComponent*>* record_components) {
709 if (record_components != nullptr && !record_components->in_aot_cache()) {
710 for (int i = 0; i < record_components->length(); i++) {
711 RecordComponent* record_component = record_components->at(i);
712 MetadataFactory::free_metadata(loader_data, record_component);
713 }
714 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
715 }
716 }
717
718 // This function deallocates the metadata and C heap pointers that the
719 // InstanceKlass points to.
720 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
721 // Orphan the mirror first, CMS thinks it's still live.
722 if (java_mirror() != nullptr) {
723 java_lang_Class::set_klass(java_mirror(), nullptr);
724 }
725
726 // Also remove mirror from handles
727 loader_data->remove_handle(_java_mirror);
728
729 // Need to take this class off the class loader data list.
730 loader_data->remove_class(this);
731
732 // The array_klass for this class is created later, after error handling.
733 // For class redefinition, we keep the original class so this scratch class
734 // doesn't have an array class. Either way, assert that there is nothing
735 // to deallocate.
736 assert(array_klasses() == nullptr, "array classes shouldn't be created for this class yet");
737
738 // Release C heap allocated data that this points to, which includes
739 // reference counting symbol names.
740 // Can't release the constant pool or MethodData C heap data here because the constant
741 // pool can be deallocated separately from the InstanceKlass for default methods and
742 // redefine classes. MethodData can also be released separately.
743 release_C_heap_structures(/* release_sub_metadata */ false);
744
745 deallocate_methods(loader_data, methods());
746 set_methods(nullptr);
747
748 deallocate_record_components(loader_data, record_components());
749 set_record_components(nullptr);
750
751 if (method_ordering() != nullptr &&
752 method_ordering() != Universe::the_empty_int_array() &&
753 !method_ordering()->in_aot_cache()) {
754 MetadataFactory::free_array<int>(loader_data, method_ordering());
755 }
756 set_method_ordering(nullptr);
757
758 // default methods can be empty
759 if (default_methods() != nullptr &&
760 default_methods() != Universe::the_empty_method_array() &&
761 !default_methods()->in_aot_cache()) {
762 MetadataFactory::free_array<Method*>(loader_data, default_methods());
763 }
764 // Do NOT deallocate the default methods, they are owned by superinterfaces.
765 set_default_methods(nullptr);
766
767 // default methods vtable indices can be empty
768 if (default_vtable_indices() != nullptr &&
769 !default_vtable_indices()->in_aot_cache()) {
770 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
771 }
772 set_default_vtable_indices(nullptr);
773
774
775 // This array is in Klass, but remove it with the InstanceKlass since
776 // this place would be the only caller and it can share memory with transitive
777 // interfaces.
778 if (secondary_supers() != nullptr &&
779 secondary_supers() != Universe::the_empty_klass_array() &&
780 // see comments in compute_secondary_supers about the following cast
781 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
782 !secondary_supers()->in_aot_cache()) {
783 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
784 }
785 set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);
786
787 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
788 set_transitive_interfaces(nullptr);
789 set_local_interfaces(nullptr);
790
791 if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->in_aot_cache()) {
792 MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
793 }
794 set_fieldinfo_stream(nullptr);
795
796 if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->in_aot_cache()) {
797 MetadataFactory::free_array<u1>(loader_data, fieldinfo_search_table());
798 }
799 set_fieldinfo_search_table(nullptr);
800
801 if (fields_status() != nullptr && !fields_status()->in_aot_cache()) {
802 MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
803 }
804 set_fields_status(nullptr);
805
806 if (inline_layout_info_array() != nullptr) {
807 MetadataFactory::free_array<InlineLayoutInfo>(loader_data, inline_layout_info_array());
808 }
809 set_inline_layout_info_array(nullptr);
810
811 // If a method from a redefined class is using this constant pool, don't
812 // delete it, yet. The new class's previous version will point to this.
813 if (constants() != nullptr) {
814 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
815 if (!constants()->in_aot_cache()) {
816 HeapShared::remove_scratch_resolved_references(constants());
817 MetadataFactory::free_metadata(loader_data, constants());
818 }
819 // Delete any cached resolution errors for the constant pool
820 SystemDictionary::delete_resolution_error(constants());
821
822 set_constants(nullptr);
823 }
824
825 if (inner_classes() != nullptr &&
826 inner_classes() != Universe::the_empty_short_array() &&
827 !inner_classes()->in_aot_cache()) {
828 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
829 }
830 set_inner_classes(nullptr);
831
832 if (nest_members() != nullptr &&
833 nest_members() != Universe::the_empty_short_array() &&
834 !nest_members()->in_aot_cache()) {
835 MetadataFactory::free_array<jushort>(loader_data, nest_members());
836 }
837 set_nest_members(nullptr);
838
839 if (permitted_subclasses() != nullptr &&
840 permitted_subclasses() != Universe::the_empty_short_array() &&
841 !permitted_subclasses()->in_aot_cache()) {
842 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
843 }
844 set_permitted_subclasses(nullptr);
845
846 if (loadable_descriptors() != nullptr &&
847 loadable_descriptors() != Universe::the_empty_short_array() &&
848 !loadable_descriptors()->in_aot_cache()) {
849 MetadataFactory::free_array<jushort>(loader_data, loadable_descriptors());
850 }
851 set_loadable_descriptors(nullptr);
852
853 if (acmp_maps_array() != nullptr) {
854 MetadataFactory::free_array<int>(loader_data, acmp_maps_array());
855 }
856 set_acmp_maps_array(nullptr);
857
858 // We should deallocate the Annotations instance if it's not in shared spaces.
859 if (annotations() != nullptr && !annotations()->in_aot_cache()) {
860 MetadataFactory::free_metadata(loader_data, annotations());
861 }
862 set_annotations(nullptr);
863
864 SystemDictionaryShared::handle_class_unloading(this);
865
866 #if INCLUDE_CDS_JAVA_HEAP
867 if (CDSConfig::is_dumping_heap()) {
868 HeapShared::remove_scratch_objects(this);
869 }
870 #endif
871 }
872
873 bool InstanceKlass::is_record() const {
874 return _record_components != nullptr &&
875 is_final() &&
876 super() == vmClasses::Record_klass();
877 }
878
879 bool InstanceKlass::is_sealed() const {
880 return _permitted_subclasses != nullptr &&
881 _permitted_subclasses != Universe::the_empty_short_array();
882 }
883
884 // JLS 8.9: An enum class is either implicitly final and derives
885 // from java.lang.Enum, or else is implicitly sealed to its
886 // anonymous subclasses. This query detects both kinds.
887 // It does not validate the finality or
888 // sealing conditions: it merely checks for a super of Enum.
889 // This is sufficient for recognizing well-formed enums.
890 bool InstanceKlass::is_enum_subclass() const {
891 InstanceKlass* s = super();
892 return (s == vmClasses::Enum_klass() ||
893 (s != nullptr && s->super() == vmClasses::Enum_klass()));
894 }
895
896 bool InstanceKlass::should_be_initialized() const {
897 return !is_initialized();
898 }
899
900 // Static size helper
901 int InstanceKlass::size(int vtable_length,
902 int itable_length,
903 int nonstatic_oop_map_size,
904 bool is_interface,
905 bool is_inline_type) {
906 return align_metadata_size(header_size() +
907 vtable_length +
908 itable_length +
909 nonstatic_oop_map_size +
910 (is_interface ? (int)sizeof(Klass*) / wordSize : 0) +
911 (is_inline_type ? (int)sizeof(InlineKlass::Members) / wordSize : 0));
912 }
913
914 int InstanceKlass::size() const {
915 return size(vtable_length(),
916 itable_length(),
917 nonstatic_oop_map_size(),
918 is_interface(),
919 is_inline_klass());
920 }
921
922 klassItable InstanceKlass::itable() const {
923 return klassItable(const_cast<InstanceKlass*>(this));
924 }
925
926 // JVMTI spec thinks there are signers and protection domain in the
927 // instanceKlass. These accessors pretend these fields are there.
928 // The hprof specification also thinks these fields are in InstanceKlass.
929 oop InstanceKlass::protection_domain() const {
930 // return the protection_domain from the mirror
931 return java_lang_Class::protection_domain(java_mirror());
932 }
933
934 objArrayOop InstanceKlass::signers() const {
935 // return the signers from the mirror
936 return java_lang_Class::signers(java_mirror());
937 }
938
// Fetch the initialization lock object from this class' mirror. The lock may
// only be null once the class can no longer be waiting for initialization
// (see the assert below and fence_and_clear_init_lock()).
oop InstanceKlass::init_lock() const {
  // return the init lock from the mirror
  oop lock = java_lang_Class::init_lock(java_mirror());
  // Prevent reordering with any access of initialization state
  OrderAccess::loadload();
  assert(lock != nullptr || !is_not_initialized(), // initialized or in_error state
         "only fully initialized state can have a null lock");
  return lock;
}
948
// Set the initialization lock to null so the object can be GC'ed. Any racing
// threads to get this lock will see a null lock and will not lock.
// That's okay because they all check for initialized state after getting
// the lock and return. For preempted vthreads we keep the oop protected
// in the ObjectMonitor (see ObjectMonitor::set_object_strong()).
void InstanceKlass::fence_and_clear_init_lock() {
  // make sure previous stores are all done, notably the init_state.
  // The storestore fence orders the state update before the lock is cleared.
  OrderAccess::storestore();
  java_lang_Class::clear_init_lock(java_mirror());
  // Clearing the lock is only legal once initialization can no longer start.
  assert(!is_not_initialized(), "class must be initialized now");
}
960
961 class PreemptableInitCall {
962 JavaThread* _thread;
963 bool _previous;
964 DEBUG_ONLY(InstanceKlass* _previous_klass;)
965 public:
966 PreemptableInitCall(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
967 _previous = thread->at_preemptable_init();
968 _thread->set_at_preemptable_init(true);
969 DEBUG_ONLY(_previous_klass = _thread->preempt_init_klass();)
970 DEBUG_ONLY(_thread->set_preempt_init_klass(ik));
971 }
972 ~PreemptableInitCall() {
973 _thread->set_at_preemptable_init(_previous);
974 DEBUG_ONLY(_thread->set_preempt_init_klass(_previous_klass));
975 }
976 };
977
978 void InstanceKlass::initialize_preemptable(TRAPS) {
979 if (this->should_be_initialized()) {
980 PreemptableInitCall pic(THREAD, this);
981 initialize_impl(THREAD);
982 } else {
983 assert(is_initialized(), "sanity check");
984 }
985 }
986
987 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
988 // process. The step comments refers to the procedure described in that section.
989 // Note: implementation moved to static method to expose the this pointer.
990 void InstanceKlass::initialize(TRAPS) {
991 if (this->should_be_initialized()) {
992 initialize_impl(CHECK);
993 // Note: at this point the class may be initialized
994 // OR it may be in the state of being initialized
995 // in case of recursive initialization!
996 } else {
997 assert(is_initialized(), "sanity check");
998 }
999 }
1000
#ifdef ASSERT
// Debug-only check for aot-initialized classes: the entire superclass chain
// must already be initialized, and any uninitialized superinterface must not
// require <clinit> execution. Otherwise skipping <clinit> at runtime would
// be unsound.
void InstanceKlass::assert_no_clinit_will_run_for_aot_initialized_class() const {
  assert(has_aot_initialized_mirror(), "must be");

  InstanceKlass* s = super();
  if (s != nullptr) {
    DEBUG_ONLY(ResourceMark rm);
    assert(s->is_initialized(), "super class %s of aot-inited class %s must have been initialized",
           s->external_name(), external_name());
    // Recurse up the superclass chain.
    s->assert_no_clinit_will_run_for_aot_initialized_class();
  }

  Array<InstanceKlass*>* interfaces = local_interfaces();
  int len = interfaces->length();
  for (int i = 0; i < len; i++) {
    InstanceKlass* intf = interfaces->at(i);
    if (!intf->is_initialized()) {
      ResourceMark rm;
      // Note: an interface needs to be marked as is_initialized() only if
      // - it has a <clinit>
      // - it has declared a default method.
      assert(!intf->interface_needs_clinit_execution_as_super(/*also_check_supers*/false),
             "uninitialized super interface %s of aot-inited class %s must not have <clinit>",
             intf->external_name(), external_name());
    }
  }
}
#endif
1029
#if INCLUDE_CDS
// Mark an aot-initialized class as fully initialized without running <clinit>
// at runtime (assert_no_clinit_will_run_for_aot_initialized_class() checks
// that this is sound).
//
// early_init -- we are moving this class into the fully_initialized state before the
// JVM is able to execute any bytecodes. See AOTLinkedClassBulkLoader::is_initializing_classes_early().
void InstanceKlass::initialize_with_aot_initialized_mirror(bool early_init, TRAPS) {
  assert(has_aot_initialized_mirror(), "must be");
  assert(CDSConfig::is_loading_heap(), "must be");
  assert(CDSConfig::is_using_aot_linked_classes(), "must be");
  assert_no_clinit_will_run_for_aot_initialized_class();

  if (is_initialized()) {
    return;
  }

  if (log_is_enabled(Info, aot, init)) {
    ResourceMark rm;
    log_info(aot, init)("%s (aot-inited%s)", external_name(), early_init ? ", early" : "");
  }

  if (is_runtime_setup_required()) {
    assert(!early_init, "must not call");
    // Need to take the slow path, which will call the runtimeSetup() function instead
    // of <clinit>
    initialize(CHECK);
    return;
  }

  LogTarget(Info, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm(THREAD);
    LogStream ls(lt);
    ls.print("%d Initializing ", call_class_initializer_counter++);
    name()->print_value_on(&ls);
    ls.print_cr("(aot-inited) (" PTR_FORMAT ") by thread \"%s\"",
                p2i(this), THREAD->name());
  }

  if (early_init) {
    precond(AOTLinkedClassBulkLoader::is_initializing_classes_early());
    precond(is_linked());
    precond(init_thread() == nullptr);
    // early_init: no bytecodes have executed yet (see comment above), so the
    // state is set directly without the notify protocol.
    set_init_state(fully_initialized);
    fence_and_clear_init_lock();
    return;
  }

  link_class(CHECK);

#ifdef ASSERT
  {
    // Sanity-check the initialization state under the init lock.
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, THREAD);
    assert(!is_initialized(), "sanity");
    assert(!is_being_initialized(), "sanity");
    assert(!is_in_error_state(), "sanity");
  }
#endif

  set_init_thread(THREAD);
  set_initialization_state_and_notify(fully_initialized, CHECK);
}
#endif
1091
// Run the bytecode verifier over this class as part of linking.
// Returns true if verification succeeded.
bool InstanceKlass::verify_code(TRAPS) {
  // 1) Verify the bytecodes
  return Verifier::verify(this, should_verify_class(), THREAD);
}
1096
1097 static void load_classes_from_loadable_descriptors_attribute(InstanceKlass *ik, TRAPS) {
1098 if (ik->loadable_descriptors() != Universe::the_empty_short_array() && PreloadClasses) {
1099 ResourceMark rm(THREAD);
1100 HandleMark hm(THREAD);
1101 for (int i = 0; i < ik->loadable_descriptors()->length(); i++) {
1102 Symbol* sig = ik->constants()->symbol_at(ik->loadable_descriptors()->at(i));
1103 if (!Signature::has_envelope(sig)) continue;
1104 TempNewSymbol class_name = Signature::strip_envelope(sig);
1105 if (class_name == ik->name()) continue;
1106 log_info(class, preload)("Preloading of class %s during linking of class %s "
1107 "because of the class is listed in the LoadableDescriptors attribute",
1108 sig->as_C_string(), ik->name()->as_C_string());
1109 oop loader = ik->class_loader();
1110 Klass* klass = SystemDictionary::resolve_or_null(class_name,
1111 Handle(THREAD, loader), THREAD);
1112 if (HAS_PENDING_EXCEPTION) {
1113 CLEAR_PENDING_EXCEPTION;
1114 }
1115 if (klass != nullptr) {
1116 log_info(class, preload)("Preloading of class %s during linking of class %s "
1117 "(cause: LoadableDescriptors attribute) succeeded",
1118 class_name->as_C_string(), ik->name()->as_C_string());
1119 if (!klass->is_inline_klass()) {
1120 // Non value class are allowed by the current spec, but it could be an indication
1121 // of an issue so let's log a warning
1122 log_info(class, preload)("Preloading of class %s during linking of class %s "
1123 "(cause: LoadableDescriptors attribute) but loaded class is not a value class",
1124 class_name->as_C_string(), ik->name()->as_C_string());
1125 }
1126 } else {
1127 log_info(class, preload)("Preloading of class %s during linking of class %s "
1128 "(cause: LoadableDescriptors attribute) failed",
1129 class_name->as_C_string(), ik->name()->as_C_string());
1130 }
1131 }
1132 }
1133 }
1134
1135 void InstanceKlass::link_class(TRAPS) {
1136 assert(is_loaded(), "must be loaded");
1137 if (!is_linked()) {
1138 link_class_impl(CHECK);
1139 }
1140 }
1141
1142 // Called to verify that a class can link during initialization, without
1143 // throwing a VerifyError.
1144 bool InstanceKlass::link_class_or_fail(TRAPS) {
1145 assert(is_loaded(), "must be loaded");
1146 if (!is_linked()) {
1147 link_class_impl(CHECK_false);
1148 }
1149 return is_linked();
1150 }
1151
// Link this class: link the supertypes first, then (under the init lock)
// verify and rewrite the bytecodes, link method entry points, and set up the
// vtable/itable. Returns true if the class is linked afterwards, false on
// failure (usually with a pending exception).
bool InstanceKlass::link_class_impl(TRAPS) {
  if (CDSConfig::is_dumping_static_archive() && SystemDictionaryShared::has_class_failed_verification(this)) {
    // This is for CDS static dump only -- we use the in_error_state to indicate that
    // the class has failed verification. Throwing the NoClassDefFoundError here is just
    // a convenient way to stop repeat attempts to verify the same (bad) class.
    //
    // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
    // if we are executing Java code. This is not a problem for CDS dumping phase since
    // it doesn't execute any Java code.
    ResourceMark rm(THREAD);
    // Names are all known to be < 64k so we know this formatted message is not excessively large.
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_NoClassDefFoundError(),
                       "Class %s, or one of its supertypes, failed class initialization",
                       external_name());
    return false;
  }
  // return if already verified
  if (is_linked()) {
    return true;
  }

  // Timing
  // timer handles recursion
  JavaThread* jt = THREAD;

  // link super class before linking this class
  InstanceKlass* super_klass = super();
  if (super_klass != nullptr) {
    if (super_klass->is_interface()) {  // check if super class is an interface
      ResourceMark rm(THREAD);
      // Names are all known to be < 64k so we know this formatted message is not excessively large.
      Exceptions::fthrow(
        THREAD_AND_LOCATION,
        vmSymbols::java_lang_IncompatibleClassChangeError(),
        "class %s has interface %s as super class",
        external_name(),
        super_klass->external_name()
      );
      return false;
    }

    super_klass->link_class_impl(CHECK_false);
  }

  // link all interfaces implemented by this class before linking this class
  Array<InstanceKlass*>* interfaces = local_interfaces();
  int num_interfaces = interfaces->length();
  for (int index = 0; index < num_interfaces; index++) {
    InstanceKlass* interk = interfaces->at(index);
    interk->link_class_impl(CHECK_false);
  }

  if (Arguments::is_valhalla_enabled()) {
    // Aggressively preloading all classes from the LoadableDescriptors attribute
    // so inline classes can be scalarized in the calling conventions computed below
    load_classes_from_loadable_descriptors_attribute(this, THREAD);
    assert(!HAS_PENDING_EXCEPTION, "Shouldn't have pending exceptions from call above");
  }

  // in case the class is linked in the process of linking its superclasses
  if (is_linked()) {
    return true;
  }

  // trace only the link time for this klass that includes
  // the verification time
  PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
                             ClassLoader::perf_class_link_selftime(),
                             ClassLoader::perf_classes_linked(),
                             jt->get_thread_stat()->perf_recursion_counts_addr(),
                             jt->get_thread_stat()->perf_timers_addr(),
                             PerfClassTraceTime::CLASS_LINK);

  // verification & rewriting
  {
    HandleMark hm(THREAD);
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE_false);
    // Don't allow preemption if we link/initialize classes below,
    // since that would release this monitor while we are in the
    // middle of linking this class.
    NoPreemptMark npm(THREAD);

    // rewritten will have been set if loader constraint error found
    // on an earlier link attempt
    // don't verify or rewrite if already rewritten
    //

    if (!is_linked()) {
      if (!is_rewritten()) {
        if (in_aot_cache()) {
          assert(!verified_at_dump_time(), "must be");
        }
        {
          bool verify_ok = verify_code(THREAD);
          if (!verify_ok) {
            return false;
          }
        }

        // Just in case a side-effect of verify linked this class already
        // (which can sometimes happen since the verifier loads classes
        // using custom class loaders, which are free to initialize things)
        if (is_linked()) {
          return true;
        }

        // also sets rewritten
        rewrite_class(CHECK_false);
      } else if (in_aot_cache()) {
        // Already rewritten at dump time; re-check the recorded constraints.
        SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
      }

      // relocate jsrs and link methods after they are all rewritten
      link_methods(CHECK_false);

      // Initialize the vtable and interface table after
      // methods have been rewritten since rewrite may
      // fabricate new Method*s.
      // also does loader constraint checking
      //
      // initialize_vtable and initialize_itable need to be rerun
      // for a shared class if
      // 1) the class is loaded by custom class loader or
      // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
      // 3) the class was not verified during dump time
      bool need_init_table = true;
      if (in_aot_cache() && verified_at_dump_time() &&
          SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
        need_init_table = false;
      }
      if (need_init_table) {
        vtable().initialize_vtable_and_check_constraints(CHECK_false);
        itable().initialize_itable_and_check_constraints(CHECK_false);
      }
#ifdef ASSERT
      vtable().verify(tty, true);
      // In case itable verification is ever added.
      // itable().verify(tty, true);
#endif
      if (Universe::is_fully_initialized()) {
        DeoptimizationScope deopt_scope;
        {
          // Now mark all code that assumes the class is not linked.
          // Set state under the Compile_lock also.
          MutexLocker ml(THREAD, Compile_lock);

          set_init_state(linked);
          CodeCache::mark_dependents_on(&deopt_scope, this);
        }
        // Perform the deopt handshake outside Compile_lock.
        deopt_scope.deoptimize_marked();
      } else {
        // Before the JVM is fully up there is no compiled code to invalidate.
        set_init_state(linked);
      }
      if (JvmtiExport::should_post_class_prepare()) {
        JvmtiExport::post_class_prepare(THREAD, this);
      }
    }
  }

  if (log_is_enabled(Info, class, link)) {
    ResourceMark rm(THREAD);
    log_info(class, link)("Linked class %s", external_name());
  }

  return true;
}
1321
1322 // Rewrite the byte codes of all of the methods of a class.
1323 // The rewriter must be called exactly once. Rewriting must happen after
1324 // verification but before the first method of the class is executed.
1325 void InstanceKlass::rewrite_class(TRAPS) {
1326 assert(is_loaded(), "must be loaded");
1327 if (is_rewritten()) {
1328 assert(in_aot_cache(), "rewriting an unshared class?");
1329 return;
1330 }
1331 Rewriter::rewrite(this, CHECK);
1332 set_rewritten();
1333 }
1334
1335 // Now relocate and link method entry points after class is rewritten.
1336 // This is outside is_rewritten flag. In case of an exception, it can be
1337 // executed more than once.
1338 void InstanceKlass::link_methods(TRAPS) {
1339 PerfTraceTime timer(ClassLoader::perf_ik_link_methods_time());
1340
1341 int len = methods()->length();
1342 for (int i = len-1; i >= 0; i--) {
1343 methodHandle m(THREAD, methods()->at(i));
1344
1345 // Set up method entry points for compiler and interpreter .
1346 m->link_method(m, CHECK);
1347 }
1348 }
1349
1350 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1351 void InstanceKlass::initialize_super_interfaces(TRAPS) {
1352 assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1353 for (int i = 0; i < local_interfaces()->length(); ++i) {
1354 InstanceKlass* ik = local_interfaces()->at(i);
1355
1356 // Initialization is depth first search ie. we start with top of the inheritance tree
1357 // has_nonstatic_concrete_methods drives searching superinterfaces since it
1358 // means has_nonstatic_concrete_methods in its superinterface hierarchy
1359 if (ik->has_nonstatic_concrete_methods()) {
1360 ik->initialize_super_interfaces(CHECK);
1361 }
1362
1363 // Only initialize() interfaces that "declare" concrete methods.
1364 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1365 ik->initialize(CHECK);
1366 }
1367 }
1368 }
1369
// Maps an InstanceKlass to the error recorded when its initialization failed,
// held alive via global OopHandles. Lazily allocated; all accesses are
// guarded by ClassInitError_lock (see add/get_initialization_error and
// clean_initialization_error_table below).
using InitializationErrorTable = HashTable<const InstanceKlass*, OopHandle, 107, AnyObj::C_HEAP, mtClass>;
static InitializationErrorTable* _initialization_error_table;
1372
// Record the exception that caused initialization of this class to fail, so
// that later initialization attempts can chain it as the cause of the
// NoClassDefFoundError they throw (see get_initialization_error()).
void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
  // Create the same exception with a message indicating the thread name,
  // and the StackTraceElements.
  Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
  ResourceMark rm(current);
  if (init_error.is_null()) {
    log_trace(class, init)("Unable to create the desired initialization error for class %s", external_name());

    // We failed to create the new exception, most likely due to either out-of-memory or
    // a stackoverflow error. If the original exception was either of those then we save
    // the shared, pre-allocated, stackless, instance of that exception.
    if (exception->klass() == vmClasses::StackOverflowError_klass()) {
      log_debug(class, init)("Using shared StackOverflowError as initialization error for class %s", external_name());
      init_error = Handle(current, Universe::class_init_stack_overflow_error());
    } else if (exception->klass() == vmClasses::OutOfMemoryError_klass()) {
      log_debug(class, init)("Using shared OutOfMemoryError as initialization error for class %s", external_name());
      init_error = Handle(current, Universe::class_init_out_of_memory_error());
    } else {
      // Neither a replacement nor a shared pre-allocated error is available:
      // record nothing for this class.
      return;
    }
  }

  MutexLocker ml(current, ClassInitError_lock);
  // Wrap the error in a global OopHandle so it survives GC until the class
  // is unloaded (released in clean_initialization_error_table()).
  OopHandle elem = OopHandle(Universe::vm_global(), init_error());
  bool created;
  if (_initialization_error_table == nullptr) {
    _initialization_error_table = new (mtClass) InitializationErrorTable();
  }
  _initialization_error_table->put_if_absent(this, elem, &created);
  assert(created, "Initialization is single threaded");
  log_trace(class, init)("Initialization error added for class %s", external_name());
}
1405
1406 oop InstanceKlass::get_initialization_error(JavaThread* current) {
1407 MutexLocker ml(current, ClassInitError_lock);
1408 if (_initialization_error_table == nullptr) {
1409 return nullptr;
1410 }
1411 OopHandle* h = _initialization_error_table->get(this);
1412 return (h != nullptr) ? h->resolve() : nullptr;
1413 }
1414
1415 // Need to remove entries for unloaded classes.
1416 void InstanceKlass::clean_initialization_error_table() {
1417 struct InitErrorTableCleaner {
1418 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1419 if (!ik->is_loader_alive()) {
1420 h.release(Universe::vm_global());
1421 return true;
1422 } else {
1423 return false;
1424 }
1425 }
1426 };
1427
1428 assert_locked_or_safepoint(ClassInitError_lock);
1429 InitErrorTableCleaner cleaner;
1430 if (_initialization_error_table != nullptr) {
1431 _initialization_error_table->unlink(&cleaner);
1432 }
1433 }
1434
1435 class ThreadWaitingForClassInit : public StackObj {
1436 JavaThread* _thread;
1437 public:
1438 ThreadWaitingForClassInit(JavaThread* thread, InstanceKlass* ik) : _thread(thread) {
1439 _thread->set_class_to_be_initialized(ik);
1440 }
1441 ~ThreadWaitingForClassInit() {
1442 _thread->set_class_to_be_initialized(nullptr);
1443 }
1444 };
1445
// Implementation of the class initialization procedure. The numbered steps
// below follow the procedure referenced as "the JVM book page 47" (the
// initialization procedure in the JVM specification).
void InstanceKlass::initialize_impl(TRAPS) {
  HandleMark hm(THREAD);

  // Make sure klass is linked (verified) before initialization
  // A class could already be verified, since it has been reflected upon.
  link_class(CHECK);

  DTRACE_CLASSINIT_PROBE(required, -1);

  // Set when this thread had to wait for a competing initializer; reported
  // through the DTrace probes below.
  bool wait = false;

  JavaThread* jt = THREAD;

  bool debug_logging_enabled = log_is_enabled(Debug, class, init);

  // refer to the JVM book page 47 for description of steps
  // Step 1
  {
    Handle h_init_lock(THREAD, init_lock());
    ObjectLocker ol(h_init_lock, CHECK_PREEMPTABLE);

    // Step 2
    // If we were to use wait() instead of waitInterruptibly() then
    // we might end up throwing IE from link/symbol resolution sites
    // that aren't expected to throw. This would wreak havoc. See 6320309.
    while (is_being_initialized() && !is_reentrant_initialization(jt)) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" waiting for initialization of %s by thread \"%s\"",
                               jt->name(), external_name(), init_thread_name());
      }
      wait = true;
      ThreadWaitingForClassInit twcl(THREAD, this);
      ol.wait_uninterruptibly(CHECK_PREEMPTABLE);
    }

    // Step 3
    // This thread is already initializing this class: recursive request.
    if (is_being_initialized() && is_reentrant_initialization(jt)) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" recursively initializing %s",
                               jt->name(), external_name());
      }
      DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
      return;
    }

    // Step 4
    // Another thread finished the job while we waited.
    if (is_initialized()) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" found %s already initialized",
                               jt->name(), external_name());
      }
      DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
      return;
    }

    // Step 5
    // A previous initialization attempt failed: throw NoClassDefFoundError,
    // chaining the recorded cause when one is available.
    if (is_in_error_state()) {
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" found %s is in error state",
                               jt->name(), external_name());
      }

      DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
      ResourceMark rm(THREAD);
      Handle cause(THREAD, get_initialization_error(THREAD));

      stringStream ss;
      ss.print("Could not initialize class %s", external_name());
      if (cause.is_null()) {
        THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
      } else {
        THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
                        ss.as_string(), cause);
      }
    } else {

      // Step 6
      // Claim the initialization: record this thread as the initializer.
      set_init_state(being_initialized);
      set_init_thread(jt);
      if (debug_logging_enabled) {
        ResourceMark rm(jt);
        log_debug(class, init)("Thread \"%s\" is initializing %s",
                               jt->name(), external_name());
      }
    }
  }

  // Block preemption once we are the initializer thread. Unmounting now
  // would complicate the reentrant case (identity is platform thread).
  NoPreemptMark npm(THREAD);

  // Pre-allocating an all-zero value to be used to reset nullable flat storages
  if (is_inline_klass()) {
    InlineKlass* vk = InlineKlass::cast(this);
    if (vk->supports_nullable_layouts()) {
      oop val = vk->allocate_instance(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        // Allocation failed: record the error, mark the class erroneous, and
        // rethrow the original exception.
        Handle e(THREAD, PENDING_EXCEPTION);
        CLEAR_PENDING_EXCEPTION;
        {
          EXCEPTION_MARK;
          add_initialization_error(THREAD, e);
          // Locks object, set state, and notify all waiting threads
          set_initialization_state_and_notify(initialization_error, THREAD);
          CLEAR_PENDING_EXCEPTION;
        }
        THROW_OOP(e());
      }
      vk->set_null_reset_value(val);
    }
  }

  // Step 7
  // Next, if C is a class rather than an interface, initialize it's super class and super
  // interfaces.
  if (!is_interface()) {
    Klass* super_klass = super();
    if (super_klass != nullptr && super_klass->should_be_initialized()) {
      super_klass->initialize(THREAD);
    }
    // If C implements any interface that declares a non-static, concrete method,
    // the initialization of C triggers initialization of its super interfaces.
    // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
    // having a superinterface that declares, non-static, concrete methods
    if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
      initialize_super_interfaces(THREAD);
    }

    // If any exceptions, complete abruptly, throwing the same exception as above.
    if (HAS_PENDING_EXCEPTION) {
      Handle e(THREAD, PENDING_EXCEPTION);
      CLEAR_PENDING_EXCEPTION;
      {
        EXCEPTION_MARK;
        add_initialization_error(THREAD, e);
        // Locks object, set state, and notify all waiting threads
        set_initialization_state_and_notify(initialization_error, THREAD);
        CLEAR_PENDING_EXCEPTION;
      }
      DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
      THROW_OOP(e());
    }
  }


  // Step 8
  // Run the class initializer (<clinit>) itself.
  {
    DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
    if (class_initializer() != nullptr) {
      // Timer includes any side effects of class initialization (resolution,
      // etc), but not recursive entry into call_class_initializer().
      PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
                               ClassLoader::perf_class_init_selftime(),
                               ClassLoader::perf_classes_inited(),
                               jt->get_thread_stat()->perf_recursion_counts_addr(),
                               jt->get_thread_stat()->perf_timers_addr(),
                               PerfClassTraceTime::CLASS_CLINIT);
      call_class_initializer(THREAD);
    } else {
      // The elapsed time is so small it's not worth counting.
      if (UsePerfData) {
        ClassLoader::perf_classes_inited()->inc();
      }
      call_class_initializer(THREAD);
    }

    if (has_strict_static_fields() && !HAS_PENDING_EXCEPTION) {
      // Step 9 also verifies that strict static fields have been initialized.
      // Status bits were set in ClassFileParser::post_process_parsed_stream.
      // After <clinit>, bits must all be clear, or else we must throw an error.
      // This is an extremely fast check, so we won't bother with a timer.
      assert(fields_status() != nullptr, "");
      Symbol* bad_strict_static = nullptr;
      for (int index = 0; index < fields_status()->length(); index++) {
        // Very fast loop over single byte array looking for a set bit.
        if (fields_status()->adr_at(index)->is_strict_static_unset()) {
          // This strict static field has not been set by the class initializer.
          // Note that in the common no-error case, we read no field metadata.
          // We only unpack it when we need to report an error.
          FieldInfo fi = field(index);
          bad_strict_static = fi.name(constants());
          if (debug_logging_enabled) {
            ResourceMark rm(jt);
            const char* msg = format_strict_static_message(bad_strict_static);
            log_debug(class, init)("%s", msg);
          } else {
            // If we are not logging, do not bother to look for a second offense.
            break;
          }
        }
      }
      if (bad_strict_static != nullptr) {
        throw_strict_static_exception(bad_strict_static, "is unset after initialization of", THREAD);
      }
    }
  }

  // Step 9
  // Successful completion: publish the fully_initialized state.
  if (!HAS_PENDING_EXCEPTION) {
    set_initialization_state_and_notify(fully_initialized, CHECK);
    DEBUG_ONLY(vtable().verify(tty, true);)
    CompilationPolicy::replay_training_at_init(this, THREAD);
  }
  else {
    // Step 10 and 11
    // Initialization failed: record the error, mark the class erroneous,
    // and throw (wrapping non-Error exceptions below).
    Handle e(THREAD, PENDING_EXCEPTION);
    CLEAR_PENDING_EXCEPTION;
    // JVMTI has already reported the pending exception
    // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
    JvmtiExport::clear_detected_exception(jt);
    {
      EXCEPTION_MARK;
      add_initialization_error(THREAD, e);
      set_initialization_state_and_notify(initialization_error, THREAD);
      CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
      // JVMTI has already reported the pending exception
      // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
      JvmtiExport::clear_detected_exception(jt);
    }
    DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
    if (e->is_a(vmClasses::Error_klass())) {
      // Errors are rethrown unchanged.
      THROW_OOP(e());
    } else {
      // Non-Error exceptions are wrapped in ExceptionInInitializerError.
      JavaCallArguments args(e);
      THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
                vmSymbols::throwable_void_signature(),
                &args);
    }
  }
  DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
}
1681
1682
// Transition this class to 'state' and wake any threads blocked on the init
// lock in initialize_impl. Called by the initializing thread for both the
// success transition (fully_initialized) and the failure transition
// (initialization_error).
void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
  Handle h_init_lock(THREAD, init_lock());
  if (h_init_lock() != nullptr) {
    ObjectLocker ol(h_init_lock, THREAD);
    set_init_thread(nullptr); // reset _init_thread before changing _init_state
    set_init_state(state);
    // Clear the init lock so later initialize() calls take the fast path;
    // the fence orders the state store before the lock clear.
    fence_and_clear_init_lock();
    ol.notify_all(CHECK);
  } else {
    // The init lock is only cleared once a terminal state has been published,
    // so reaching this branch means the state is being set a second time.
    assert(h_init_lock() != nullptr, "The initialization state should never be set twice");
    set_init_thread(nullptr); // reset _init_thread before changing _init_state
    set_init_state(state);
  }
}
1697
// Protocol check for an access to a strict static field while its declaring
// class is initializing. Status bits were seeded by ClassFileParser.
//   field_index - index into this klass's field metadata
//   is_writing  - true for a write (putstatic), false for a read (getstatic)
// Throws IllegalStateException on read-before-write, or on a write of a
// final strict static after it has been read.
void InstanceKlass::notify_strict_static_access(int field_index, bool is_writing, TRAPS) {
  guarantee(field_index >= 0 && field_index < fields_status()->length(), "valid field index");
  DEBUG_ONLY(FieldInfo debugfi = field(field_index));
  assert(debugfi.access_flags().is_strict(), "");
  assert(debugfi.access_flags().is_static(), "");
  FieldStatus& fs = *fields_status()->adr_at(field_index);
  LogTarget(Trace, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm(THREAD);
    LogStream ls(lt);
    FieldInfo fi = field(field_index);
    ls.print("notify %s %s %s%s ",
             external_name(), is_writing? "Write" : "Read",
             fs.is_strict_static_unset() ? "Unset" : "(set)",
             fs.is_strict_static_unread() ? "+Unread" : "");
    fi.print(&ls, constants());
  }
  if (fs.is_strict_static_unset()) {
    assert(fs.is_strict_static_unread(), "ClassFileParser resp.");
    // If it is not set, there are only two reasonable things we can do here:
    // - mark it set if this is putstatic
    // - throw an error (Read-Before-Write) if this is getstatic

    // The unset state is (or should be) transient, and observable only in one
    // thread during the execution of <clinit>. Something is wrong here as this
    // should not be possible
    guarantee(is_reentrant_initialization(THREAD), "unscoped access to strict static");
    if (is_writing) {
      // clear the "unset" bit, since the field is actually going to be written
      fs.update_strict_static_unset(false);
    } else {
      // throw an IllegalStateException, since we are reading before writing
      // see also InstanceKlass::initialize_impl, Step 8 (at end)
      Symbol* bad_strict_static = field(field_index).name(constants());
      throw_strict_static_exception(bad_strict_static, "is unset before first read in", CHECK);
    }
  } else {
    // Ensure no write after read for final strict statics
    FieldInfo fi = field(field_index);
    bool is_final = fi.access_flags().is_final();
    if (is_final) {
      // no final write after read, so observing a constant freezes it, as if <clinit> ended early
      // (maybe we could trust the constant a little earlier, before <clinit> ends)
      if (is_writing && !fs.is_strict_static_unread()) {
        Symbol* bad_strict_static = fi.name(constants());
        throw_strict_static_exception(bad_strict_static, "is set after read (as final) in", CHECK);
      } else if (!is_writing && fs.is_strict_static_unread()) {
        // First read of an already-set final strict static: record the read so
        // a later write can be rejected above.
        fs.update_strict_static_unread(false);
      }
    }
  }
}
1750
1751 void InstanceKlass::throw_strict_static_exception(Symbol* field_name, const char* when, TRAPS) {
1752 ResourceMark rm(THREAD);
1753 const char* msg = format_strict_static_message(field_name, when);
1754 THROW_MSG(vmSymbols::java_lang_IllegalStateException(), msg);
1755 }
1756
1757 const char* InstanceKlass::format_strict_static_message(Symbol* field_name, const char* when) {
1758 stringStream ss;
1759 ss.print("Strict static \"%s\" %s %s",
1760 field_name->as_C_string(),
1761 when == nullptr ? "is unset in" : when,
1762 external_name());
1763 return ss.as_string();
1764 }
1765
1766 // Update hierarchy. This is done before the new klass has been added to the SystemDictionary. The Compile_lock
1767 // is grabbed, to ensure that the compiler is not using the class hierarchy.
void InstanceKlass::add_to_hierarchy(JavaThread* current) {
  assert(!SafepointSynchronize::is_at_safepoint(), "must NOT be at safepoint");

  // Dependents are marked for deoptimization under Compile_lock, but the
  // deopt handshake itself must run after the lock is released.
  DeoptimizationScope deopt_scope;
  {
    MutexLocker ml(current, Compile_lock);

    set_init_state(InstanceKlass::loaded);
    // make sure init_state store is already done.
    // The compiler reads the hierarchy outside of the Compile_lock.
    // Access ordering is used to add to hierarchy.

    // Link into hierarchy.
    append_to_sibling_list();                    // add to superklass/sibling list
    process_interfaces();                        // handle all "implements" declarations

    // Now mark all code that depended on old class hierarchy.
    // Note: must be done *after* linking k into the hierarchy (was bug 12/9/97)
    if (Universe::is_fully_initialized()) {
      CodeCache::mark_dependents_on(&deopt_scope, this);
    }
  }
  // Perform the deopt handshake outside Compile_lock.
  deopt_scope.deoptimize_marked();
}
1793
1794
1795 InstanceKlass* InstanceKlass::implementor() const {
1796 InstanceKlass* volatile* ik = adr_implementor();
1797 if (ik == nullptr) {
1798 return nullptr;
1799 } else {
1800 // This load races with inserts, and therefore needs acquire.
1801 InstanceKlass* ikls = AtomicAccess::load_acquire(ik);
1802 if (ikls != nullptr && !ikls->is_loader_alive()) {
1803 return nullptr; // don't return unloaded class
1804 } else {
1805 return ikls;
1806 }
1807 }
1808 }
1809
1810
1811 void InstanceKlass::set_implementor(InstanceKlass* ik) {
1812 assert_locked_or_safepoint(Compile_lock);
1813 assert(is_interface(), "not interface");
1814 InstanceKlass* volatile* addr = adr_implementor();
1815 assert(addr != nullptr, "null addr");
1816 if (addr != nullptr) {
1817 AtomicAccess::release_store(addr, ik);
1818 }
1819 }
1820
1821 int InstanceKlass::nof_implementors() const {
1822 InstanceKlass* ik = implementor();
1823 if (ik == nullptr) {
1824 return 0;
1825 } else if (ik != this) {
1826 return 1;
1827 } else {
1828 return 2;
1829 }
1830 }
1831
1832 // The embedded _implementor field can only record one implementor.
1833 // When there are more than one implementors, the _implementor field
1834 // is set to the interface Klass* itself. Following are the possible
1835 // values for the _implementor field:
1836 // null - no implementor
1837 // implementor Klass* - one implementor
1838 // self - more than one implementor
1839 //
1840 // The _implementor field only exists for interfaces.
void InstanceKlass::add_implementor(InstanceKlass* ik) {
  if (Universe::is_fully_initialized()) {
    assert_lock_strong(Compile_lock);
  }
  assert(is_interface(), "not interface");
  // Filter out my subinterfaces.
  // (Note: Interfaces are never on the subklass list.)
  if (ik->is_interface()) return;

  // Filter out subclasses whose supers already implement me.
  // (Note: CHA must walk subclasses of direct implementors
  // in order to locate indirect implementors.)
  InstanceKlass* super_ik = ik->super();
  if (super_ik != nullptr && super_ik->implements_interface(this))
    // We only need to check one immediate superclass, since the
    // implements_interface query looks at transitive_interfaces.
    // Any supers of the super have the same (or fewer) transitive_interfaces.
    return;

  // See the comment block above: null => none, self => many, else => one.
  InstanceKlass* iklass = implementor();
  if (iklass == nullptr) {
    set_implementor(ik);
  } else if (iklass != this && iklass != ik) {
    // There is already an implementor. Use itself as an indicator of
    // more than one implementors.
    set_implementor(this);
  }

  // The implementor also implements the transitive_interfaces
  // (recursing through local interfaces covers the transitive closure).
  for (int index = 0; index < local_interfaces()->length(); index++) {
    local_interfaces()->at(index)->add_implementor(ik);
  }
}
1874
1875 void InstanceKlass::init_implementor() {
1876 if (is_interface()) {
1877 set_implementor(nullptr);
1878 }
1879 }
1880
1881
1882 void InstanceKlass::process_interfaces() {
1883 // link this class into the implementors list of every interface it implements
1884 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1885 assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1886 InstanceKlass* interf = local_interfaces()->at(i);
1887 assert(interf->is_interface(), "expected interface");
1888 interf->add_implementor(this);
1889 }
1890 }
1891
1892 bool InstanceKlass::can_be_primary_super_slow() const {
1893 if (is_interface())
1894 return false;
1895 else
1896 return Klass::can_be_primary_super_slow();
1897 }
1898
// Compute the secondary supers (implemented interfaces) for this class.
// Returns nullptr when _secondary_supers could be set directly here (the
// shared empty array, or the transitive interface list reused as-is);
// otherwise returns a growable copy for the caller to finish, leaving
// 'num_extra_slots' of room.
GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
                                                               Array<InstanceKlass*>* transitive_interfaces) {
  // The secondaries are the implemented interfaces.
  // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
  // (but it's safe to do here because we won't write into _secondary_supers from this point on).
  Array<Klass*>* interfaces = (Array<Klass*>*)(address)transitive_interfaces;
  int num_secondaries = num_extra_slots + interfaces->length();
  if (num_secondaries == 0) {
    // Must share this for correct bootstrapping!
    set_secondary_supers(Universe::the_empty_klass_array(), Universe::the_empty_klass_bitmap());
    return nullptr;
  } else if (num_extra_slots == 0 && interfaces->length() <= 1) {
    // We will reuse the transitive interfaces list if we're certain
    // it's in hash order.
    uintx bitmap = compute_secondary_supers_bitmap(interfaces);
    set_secondary_supers(interfaces, bitmap);
    return nullptr;
  }
  // Copy transitive interfaces to a temporary growable array to be constructed
  // into the secondary super list with extra slots.
  GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
  for (int i = 0; i < interfaces->length(); i++) {
    secondaries->push(interfaces->at(i));
  }
  return secondaries;
}
1925
1926 bool InstanceKlass::implements_interface(Klass* k) const {
1927 if (this == k) return true;
1928 assert(k->is_interface(), "should be an interface class");
1929 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1930 if (transitive_interfaces()->at(i) == k) {
1931 return true;
1932 }
1933 }
1934 return false;
1935 }
1936
1937 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1938 // Verify direct super interface
1939 if (this == k) return true;
1940 assert(k->is_interface(), "should be an interface class");
1941 for (int i = 0; i < local_interfaces()->length(); i++) {
1942 if (local_interfaces()->at(i) == k) {
1943 return true;
1944 }
1945 }
1946 return false;
1947 }
1948
// Register instance 'i' with the finalizer machinery by calling the Java-side
// Finalizer.register method. Returns the instance re-read through the handle,
// since the Java call may trigger GC that moves the object.
instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
  if (TraceFinalizerRegistration) {
    tty->print("Registered ");
    i->print_value_on(tty);
    tty->print_cr(" (" PTR_FORMAT ") as finalizable", p2i(i));
  }
  instanceHandle h_i(THREAD, i);
  // Pass the handle as argument, JavaCalls::call expects oop as jobjects
  JavaValue result(T_VOID);
  JavaCallArguments args(h_i);
  methodHandle mh(THREAD, Universe::finalizer_register_method());
  JavaCalls::call(&result, mh, &args, CHECK_NULL);
  MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)
  return h_i();
}
1964
1965 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1966 assert(!is_abstract() && !is_interface(), "Should not create this object");
1967 size_t size = size_helper(); // Query before forming handle.
1968 return (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1969 }
1970
1971 instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
1972 Klass* k = java_lang_Class::as_Klass(java_class);
1973 if (k == nullptr) {
1974 ResourceMark rm(THREAD);
1975 THROW_(vmSymbols::java_lang_InstantiationException(), nullptr);
1976 }
1977 InstanceKlass* ik = cast(k);
1978 ik->check_valid_for_instantiation(false, CHECK_NULL);
1979 ik->initialize(CHECK_NULL);
1980 return ik->allocate_instance(THREAD);
1981 }
1982
1983 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1984 return instanceHandle(THREAD, allocate_instance(THREAD));
1985 }
1986
1987 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1988 if (is_interface() || is_abstract()) {
1989 ResourceMark rm(THREAD);
1990 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1991 : vmSymbols::java_lang_InstantiationException(), external_name());
1992 }
1993 if (this == vmClasses::Class_klass()) {
1994 ResourceMark rm(THREAD);
1995 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1996 : vmSymbols::java_lang_IllegalAccessException(), external_name());
1997 }
1998 }
1999
// Return (creating on first use) the array klass for n-dimensional arrays of
// this class. The 1-dimensional ObjArrayKlass is cached in _array_klasses and
// published with release/acquire; higher dimensions are delegated to it.
ArrayKlass* InstanceKlass::array_klass(int n, TRAPS) {
  // Need load-acquire for lock-free read
  if (array_klasses_acquire() == nullptr) {

    // Recursively lock array allocation
    RecursiveLocker rl(MultiArray_lock, THREAD);

    // Check if another thread created the array klass while we were waiting for the lock.
    if (array_klasses() == nullptr) {
      ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
      // use 'release' to pair with lock-free load
      release_set_array_klasses(k);
    }
  }

  // array_klasses() will always be set at this point
  ArrayKlass* ak = array_klasses();
  assert(ak != nullptr, "should be set");
  return ak->array_klass(n, THREAD);
}
2020
2021 ArrayKlass* InstanceKlass::array_klass_or_null(int n) {
2022 // Need load-acquire for lock-free read
2023 ArrayKlass* ak = array_klasses_acquire();
2024 if (ak == nullptr) {
2025 return nullptr;
2026 } else {
2027 return ak->array_klass_or_null(n);
2028 }
2029 }
2030
2031 ArrayKlass* InstanceKlass::array_klass(TRAPS) {
2032 return array_klass(1, THREAD);
2033 }
2034
2035 ArrayKlass* InstanceKlass::array_klass_or_null() {
2036 return array_klass_or_null(1);
2037 }
2038
2039 Method* InstanceKlass::class_initializer() const {
2040 Method* clinit = find_method(
2041 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
2042 if (clinit != nullptr && clinit->is_class_initializer()) {
2043 return clinit;
2044 }
2045 return nullptr;
2046 }
2047
// Run this class's <clinit> (if any) in the current thread. May instead be
// satisfied from the AOT/CDS archive, or suppressed under compile replay.
void InstanceKlass::call_class_initializer(TRAPS) {
  if (ReplayCompiles &&
      (ReplaySuppressInitializers == 1 ||
       (ReplaySuppressInitializers >= 2 && class_loader() != nullptr))) {
    // Hide the existence of the initializer for the purpose of replaying the compile
    return;
  }

#if INCLUDE_CDS
  // This is needed to ensure the consistency of the archived heap objects.
  if (has_aot_initialized_mirror() && CDSConfig::is_loading_heap()) {
    // The mirror was initialized at AOT time; only runtime setup remains.
    AOTClassInitializer::call_runtime_setup(THREAD, this);
    return;
  } else if (has_archived_enum_objs()) {
    assert(in_aot_cache(), "must be");
    bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK);
    if (initialized) {
      // Enum constants restored from the archive; <clinit> is not run.
      return;
    }
  }
#endif

  methodHandle h_method(THREAD, class_initializer());
  assert(!is_initialized(), "we cannot initialize twice");
  LogTarget(Info, class, init) lt;
  if (lt.is_enabled()) {
    ResourceMark rm(THREAD);
    LogStream ls(lt);
    ls.print("%d Initializing ", call_class_initializer_counter++);
    name()->print_value_on(&ls);
    ls.print_cr("%s (" PTR_FORMAT ") by thread \"%s\"",
                h_method() == nullptr ? "(no method)" : "", p2i(this),
                THREAD->name());
  }
  if (h_method() != nullptr) {
    ThreadInClassInitializer ticl(THREAD, this); // Track class being initialized
    JavaCallArguments args; // No arguments
    JavaValue result(T_VOID);
    JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
  }
}
2089
2090 // If a class that implements this interface is initialized, is the JVM required
2091 // to first execute a <clinit> method declared in this interface,
2092 // or (if also_check_supers==true) any of the super types of this interface?
2093 //
2094 // JVMS 5.5. Initialization, step 7: Next, if C is a class rather than
2095 // an interface, then let SC be its superclass and let SI1, ..., SIn
2096 // be all superinterfaces of C (whether direct or indirect) that
2097 // declare at least one non-abstract, non-static method.
2098 //
2099 // So when an interface is initialized, it does not look at its
2100 // supers. But a proper class will ensure that all of its supers have
2101 // run their <clinit> methods, except that it disregards interfaces
2102 // that lack a non-static concrete method (i.e., a default method).
2103 // Therefore, you should probably call this method only when the
2104 // current class is a super of some proper class, not an interface.
2105 bool InstanceKlass::interface_needs_clinit_execution_as_super(bool also_check_supers) const {
2106 assert(is_interface(), "must be");
2107
2108 if (!has_nonstatic_concrete_methods()) {
2109 // quick check: no nonstatic concrete methods are declared by this or any super interfaces
2110 return false;
2111 }
2112
2113 // JVMS 5.5. Initialization
2114 // ...If C is an interface that declares a non-abstract,
2115 // non-static method, the initialization of a class that
2116 // implements C directly or indirectly.
2117 if (declares_nonstatic_concrete_methods() && class_initializer() != nullptr) {
2118 return true;
2119 }
2120 if (also_check_supers) {
2121 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2122 for (int i = 0; i < all_ifs->length(); ++i) {
2123 InstanceKlass* super_intf = all_ifs->at(i);
2124 if (super_intf->declares_nonstatic_concrete_methods() && super_intf->class_initializer() != nullptr) {
2125 return true;
2126 }
2127 }
2128 }
2129 return false;
2130 }
2131
// Fill 'entry_for' with the interpreter oop map of 'method' at 'bci'.
// The backing OopMapCache is created lazily and published with a CAS so
// concurrent first callers converge on a single shared instance.
void InstanceKlass::mask_for(const methodHandle& method, int bci,
                             InterpreterOopMap* entry_for) {
  // Lazily create the _oop_map_cache at first request.
  // Load_acquire is needed to safely get instance published with CAS by another thread.
  OopMapCache* oop_map_cache = AtomicAccess::load_acquire(&_oop_map_cache);
  if (oop_map_cache == nullptr) {
    // Try to install new instance atomically.
    oop_map_cache = new OopMapCache();
    OopMapCache* other = AtomicAccess::cmpxchg(&_oop_map_cache, (OopMapCache*)nullptr, oop_map_cache);
    if (other != nullptr) {
      // Someone else managed to install before us, ditch local copy and use the existing one.
      delete oop_map_cache;
      oop_map_cache = other;
    }
  }
  // _oop_map_cache is constant after init; lookup below does its own locking.
  oop_map_cache->lookup(method, bci, entry_for);
}
2150
2151
2152 FieldInfo InstanceKlass::field(int index) const {
2153 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
2154 if (fs.index() == index) {
2155 return fs.to_FieldInfo();
2156 }
2157 }
2158 fatal("Field not found");
2159 return FieldInfo();
2160 }
2161
2162 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2163 JavaFieldStream fs(this);
2164 if (fs.lookup(name, sig)) {
2165 assert(fs.name() == name, "name must match");
2166 assert(fs.signature() == sig, "signature must match");
2167 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2168 return true;
2169 }
2170 return false;
2171 }
2172
2173
2174 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2175 const int n = local_interfaces()->length();
2176 for (int i = 0; i < n; i++) {
2177 InstanceKlass* intf1 = local_interfaces()->at(i);
2178 assert(intf1->is_interface(), "just checking type");
2179 // search for field in current interface
2180 if (intf1->find_local_field(name, sig, fd)) {
2181 assert(fd->is_static(), "interface field must be static");
2182 return intf1;
2183 }
2184 // search for field in direct superinterfaces
2185 Klass* intf2 = intf1->find_interface_field(name, sig, fd);
2186 if (intf2 != nullptr) return intf2;
2187 }
2188 // otherwise field lookup fails
2189 return nullptr;
2190 }
2191
2192
2193 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2194 // search order according to newest JVM spec (5.4.3.2, p.167).
2195 // 1) search for field in current klass
2196 if (find_local_field(name, sig, fd)) {
2197 return const_cast<InstanceKlass*>(this);
2198 }
2199 // 2) search for field recursively in direct superinterfaces
2200 { Klass* intf = find_interface_field(name, sig, fd);
2201 if (intf != nullptr) return intf;
2202 }
2203 // 3) apply field lookup recursively if superclass exists
2204 { InstanceKlass* supr = super();
2205 if (supr != nullptr) return supr->find_field(name, sig, fd);
2206 }
2207 // 4) otherwise field lookup fails
2208 return nullptr;
2209 }
2210
2211
2212 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
2213 // search order according to newest JVM spec (5.4.3.2, p.167).
2214 // 1) search for field in current klass
2215 if (find_local_field(name, sig, fd)) {
2216 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
2217 }
2218 // 2) search for field recursively in direct superinterfaces
2219 if (is_static) {
2220 Klass* intf = find_interface_field(name, sig, fd);
2221 if (intf != nullptr) return intf;
2222 }
2223 // 3) apply field lookup recursively if superclass exists
2224 { InstanceKlass* supr = super();
2225 if (supr != nullptr) return supr->find_field(name, sig, is_static, fd);
2226 }
2227 // 4) otherwise field lookup fails
2228 return nullptr;
2229 }
2230
2231 bool InstanceKlass::contains_field_offset(int offset) {
2232 if (this->is_inline_klass()) {
2233 InlineKlass* vk = InlineKlass::cast(this);
2234 return offset >= vk->payload_offset() && offset < (vk->payload_offset() + vk->payload_size_in_bytes());
2235 } else {
2236 fieldDescriptor fd;
2237 return find_field_from_offset(offset, false, &fd);
2238 }
2239 }
2240
2241 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
2242 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2243 if (fs.offset() == offset) {
2244 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2245 if (fd->is_static() == is_static) return true;
2246 }
2247 }
2248 return false;
2249 }
2250
2251
2252 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
2253 const InstanceKlass* klass = this;
2254 while (klass != nullptr) {
2255 if (klass->find_local_field_from_offset(offset, is_static, fd)) {
2256 return true;
2257 }
2258 klass = klass->super();
2259 }
2260 return false;
2261 }
2262
2263 bool InstanceKlass::find_local_flat_field_containing_offset(int offset, fieldDescriptor* fd) const {
2264 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2265 if (!fs.is_flat()) {
2266 continue;
2267 }
2268
2269 if (fs.offset() > offset) {
2270 continue;
2271 }
2272
2273 const int offset_in_flat_field = offset - fs.offset();
2274 const InlineLayoutInfo layout_info = inline_layout_info(fs.index());
2275 const int field_size = layout_info.klass()->layout_size_in_bytes(layout_info.kind());
2276
2277 assert(LayoutKindHelper::is_flat(layout_info.kind()), "Must be flat");
2278
2279 if (offset_in_flat_field < field_size) {
2280 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2281 assert(!fd->is_static(), "Static fields are not flattened");
2282
2283 return true;
2284 }
2285 }
2286
2287 return false;
2288 }
2289
2290 bool InstanceKlass::find_flat_field_containing_offset(int offset, fieldDescriptor* fd) const {
2291 const InstanceKlass* klass = this;
2292 while (klass != nullptr) {
2293 if (klass->find_local_flat_field_containing_offset(offset, fd)) {
2294 return true;
2295 }
2296
2297 klass = klass->super();
2298 }
2299
2300 return false;
2301 }
2302
2303 void InstanceKlass::methods_do(void f(Method* method)) {
2304 // Methods aren't stable until they are loaded. This can be read outside
2305 // a lock through the ClassLoaderData for profiling
2306 // Redefined scratch classes are on the list and need to be cleaned
2307 if (!is_loaded() && !is_scratch_class()) {
2308 return;
2309 }
2310
2311 int len = methods()->length();
2312 for (int index = 0; index < len; index++) {
2313 Method* m = methods()->at(index);
2314 assert(m->is_method(), "must be method");
2315 f(m);
2316 }
2317 }
2318
2319
2320 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
2321 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
2322 if (fs.access_flags().is_static()) {
2323 fieldDescriptor& fd = fs.field_descriptor();
2324 cl->do_field(&fd);
2325 }
2326 }
2327 }
2328
2329
2330 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
2331 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
2332 if (fs.access_flags().is_static()) {
2333 fieldDescriptor& fd = fs.field_descriptor();
2334 f(&fd, mirror, CHECK);
2335 }
2336 }
2337 }
2338
2339 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
2340 InstanceKlass* super = this->super();
2341 if (super != nullptr) {
2342 super->do_nonstatic_fields(cl);
2343 }
2344 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2345 fieldDescriptor& fd = fs.field_descriptor();
2346 if (!fd.is_static()) {
2347 cl->do_field(&fd);
2348 }
2349 }
2350 }
2351
// Ascending comparator on field offset, used to sort nonstatic fields for
// print_nonstatic_fields(). Field offsets are small and non-negative, so the
// subtraction cannot overflow.
static int compare_fields_by_offset(FieldInfo* a, FieldInfo* b) {
  return a->offset() - b->offset();
}
2355
// Visit all nonstatic fields (superclass fields first) in ascending offset
// order, the order DebugInfo uses.
void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) {
  InstanceKlass* super = this->super();
  if (super != nullptr) {
    super->print_nonstatic_fields(cl);
  }
  ResourceMark rm;
  // In DebugInfo nonstatic fields are sorted by offset.
  GrowableArray<FieldInfo> fields_sorted;
  for (AllFieldStream fs(this); !fs.done(); fs.next()) {
    if (!fs.access_flags().is_static()) {
      fields_sorted.push(fs.to_FieldInfo());
    }
  }
  int length = fields_sorted.length();
  if (length > 0) {
    fields_sorted.sort(compare_fields_by_offset);
    fieldDescriptor fd;
    for (int i = 0; i < length; i++) {
      fd.reinitialize(this, fields_sorted.at(i));
      assert(!fd.is_static() && fd.offset() == checked_cast<int>(fields_sorted.at(i).offset()), "only nonstatic fields");
      cl->do_field(&fd);
    }
  }
}
2380
#ifdef ASSERT
// Debug-only reference implementation: O(n) scan for a name+signature match,
// used to cross-check the binary search result.
static int linear_search(const Array<Method*>* methods,
                         const Symbol* name,
                         const Symbol* signature) {
  const int count = methods->length();
  for (int i = 0; i < count; i++) {
    const Method* const m = methods->at(i);
    assert(m->is_method(), "must be method");
    if (m->name() == name && m->signature() == signature) {
      return i;
    }
  }
  return -1;
}
#endif
2396
// When true, quick_search() falls back to a linear scan; set while dumping a
// dynamic CDS archive, when the methods array may no longer be sorted by the
// addresses of the method-name Symbols.
bool InstanceKlass::_disable_method_binary_search = false;
2398
2399 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
2400 int len = methods->length();
2401 int l = 0;
2402 int h = len - 1;
2403 while (l <= h) {
2404 Method* m = methods->at(l);
2405 if (m->name() == name) {
2406 return l;
2407 }
2408 l++;
2409 }
2410 return -1;
2411 }
2412
// Return the index of a method named 'name' in 'methods', or -1. Uses binary
// search over the name Symbols' addresses (Symbols are unique, so address
// order is a valid total order), unless dynamic CDS dumping has unsorted the
// array, in which case a linear scan is used.
inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
  if (_disable_method_binary_search) {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    // At the final stage of dynamic dumping, the methods array may not be sorted
    // by ascending addresses of their names, so we can't use binary search anymore.
    // However, methods with the same name are still laid out consecutively inside the
    // methods array, so let's look for the first one that matches.
    return linear_search(methods, name);
  }

  int len = methods->length();
  int l = 0;
  int h = len - 1;

  // methods are sorted by ascending addresses of their names, so do binary search
  while (l <= h) {
    int mid = (l + h) >> 1;
    Method* m = methods->at(mid);
    assert(m->is_method(), "must be method");
    int res = m->name()->fast_compare(name);
    if (res == 0) {
      return mid;
    } else if (res < 0) {
      l = mid + 1;
    } else {
      h = mid - 1;
    }
  }
  return -1;
}
2443
2444 // find_method looks up the name/signature in the local methods array
2445 Method* InstanceKlass::find_method(const Symbol* name,
2446 const Symbol* signature) const {
2447 return find_method_impl(name, signature,
2448 OverpassLookupMode::find,
2449 StaticLookupMode::find,
2450 PrivateLookupMode::find);
2451 }
2452
2453 Method* InstanceKlass::find_method_impl(const Symbol* name,
2454 const Symbol* signature,
2455 OverpassLookupMode overpass_mode,
2456 StaticLookupMode static_mode,
2457 PrivateLookupMode private_mode) const {
2458 return InstanceKlass::find_method_impl(methods(),
2459 name,
2460 signature,
2461 overpass_mode,
2462 static_mode,
2463 private_mode);
2464 }
2465
2466 // find_instance_method looks up the name/signature in the local methods array
2467 // and skips over static methods
2468 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
2469 const Symbol* name,
2470 const Symbol* signature,
2471 PrivateLookupMode private_mode) {
2472 Method* const meth = InstanceKlass::find_method_impl(methods,
2473 name,
2474 signature,
2475 OverpassLookupMode::find,
2476 StaticLookupMode::skip,
2477 private_mode);
2478 assert(((meth == nullptr) || !meth->is_static()),
2479 "find_instance_method should have skipped statics");
2480 return meth;
2481 }
2482
2483 // find_instance_method looks up the name/signature in the local methods array
2484 // and skips over static methods
2485 Method* InstanceKlass::find_instance_method(const Symbol* name,
2486 const Symbol* signature,
2487 PrivateLookupMode private_mode) const {
2488 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
2489 }
2490
2491 // Find looks up the name/signature in the local methods array
2492 // and filters on the overpass, static and private flags
2493 // This returns the first one found
2494 // note that the local methods array can have up to one overpass, one static
2495 // and one instance (private or not) with the same name/signature
2496 Method* InstanceKlass::find_local_method(const Symbol* name,
2497 const Symbol* signature,
2498 OverpassLookupMode overpass_mode,
2499 StaticLookupMode static_mode,
2500 PrivateLookupMode private_mode) const {
2501 return InstanceKlass::find_method_impl(methods(),
2502 name,
2503 signature,
2504 overpass_mode,
2505 static_mode,
2506 private_mode);
2507 }
2508
2509 // Find looks up the name/signature in the local methods array
2510 // and filters on the overpass, static and private flags
2511 // This returns the first one found
2512 // note that the local methods array can have up to one overpass, one static
2513 // and one instance (private or not) with the same name/signature
2514 Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
2515 const Symbol* name,
2516 const Symbol* signature,
2517 OverpassLookupMode overpass_mode,
2518 StaticLookupMode static_mode,
2519 PrivateLookupMode private_mode) {
2520 return InstanceKlass::find_method_impl(methods,
2521 name,
2522 signature,
2523 overpass_mode,
2524 static_mode,
2525 private_mode);
2526 }
2527
2528 Method* InstanceKlass::find_method(const Array<Method*>* methods,
2529 const Symbol* name,
2530 const Symbol* signature) {
2531 return InstanceKlass::find_method_impl(methods,
2532 name,
2533 signature,
2534 OverpassLookupMode::find,
2535 StaticLookupMode::find,
2536 PrivateLookupMode::find);
2537 }
2538
2539 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
2540 const Symbol* name,
2541 const Symbol* signature,
2542 OverpassLookupMode overpass_mode,
2543 StaticLookupMode static_mode,
2544 PrivateLookupMode private_mode) {
2545 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
2546 return hit >= 0 ? methods->at(hit): nullptr;
2547 }
2548
2549 // true if method matches signature and conforms to skipping_X conditions.
2550 static bool method_matches(const Method* m,
2551 const Symbol* signature,
2552 bool skipping_overpass,
2553 bool skipping_static,
2554 bool skipping_private) {
2555 return ((m->signature() == signature) &&
2556 (!skipping_overpass || !m->is_overpass()) &&
2557 (!skipping_static || !m->is_static()) &&
2558 (!skipping_private || !m->is_private()));
2559 }
2560
// Used directly for default_methods to find the index into the
// default_vtable_indices, and indirectly by find_method
// find_method_index looks in the local methods array to return the index
// of the matching name/signature. If, overpass methods are being ignored,
// the search continues to find a potential non-overpass match. This capability
// is important during method resolution to prefer a static method, for example,
// over an overpass method.
// There is the possibility in any _method's array to have the same name/signature
// for a static method, an overpass method and a local instance method
// To correctly catch a given method, the search criteria may need
// to explicitly skip the other two. For local instance methods, it
// is often necessary to skip private methods
int InstanceKlass::find_method_index(const Array<Method*>* methods,
                                     const Symbol* name,
                                     const Symbol* signature,
                                     OverpassLookupMode overpass_mode,
                                     StaticLookupMode static_mode,
                                     PrivateLookupMode private_mode) {
  // Translate the lookup-mode enums into the skip flags method_matches() uses.
  const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
  const bool skipping_static = (static_mode == StaticLookupMode::skip);
  const bool skipping_private = (private_mode == PrivateLookupMode::skip);
  // Binary search by name only; 'hit' is *some* method with that name, not
  // necessarily the one with the wanted signature.
  const int hit = quick_search(methods, name);
  if (hit != -1) {
    const Method* const m = methods->at(hit);

    // Do linear search to find matching signature. First, quick check
    // for common case, ignoring overpasses if requested.
    if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
      return hit;
    }

    // search downwards through overloaded methods
    // (same-named methods are contiguous in the sorted array, so stop at the
    // first entry with a different name)
    int i;
    for (i = hit - 1; i >= 0; --i) {
      const Method* const m = methods->at(i);
      assert(m->is_method(), "must be method");
      if (m->name() != name) {
        break;
      }
      if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
        return i;
      }
    }
    // search upwards
    for (i = hit + 1; i < methods->length(); ++i) {
      const Method* const m = methods->at(i);
      assert(m->is_method(), "must be method");
      if (m->name() != name) {
        break;
      }
      if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
        return i;
      }
    }
    // not found
#ifdef ASSERT
    // Cross-check with a plain linear search; only meaningful when no skip
    // filters are active (linear_search does not apply them).
    const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
                      linear_search(methods, name, signature);
    assert(-1 == index, "binary search should have found entry %d", index);
#endif
  }
  return -1;
}
2624
2625 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
2626 return find_method_by_name(methods(), name, end);
2627 }
2628
2629 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
2630 const Symbol* name,
2631 int* end_ptr) {
2632 assert(end_ptr != nullptr, "just checking");
2633 int start = quick_search(methods, name);
2634 int end = start + 1;
2635 if (start != -1) {
2636 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
2637 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
2638 *end_ptr = end;
2639 return start;
2640 }
2641 return -1;
2642 }
2643
2644 // uncached_lookup_method searches both the local class methods array and all
2645 // superclasses methods arrays, skipping any overpass methods in superclasses,
2646 // and possibly skipping private methods.
2647 Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
2648 const Symbol* signature,
2649 OverpassLookupMode overpass_mode,
2650 PrivateLookupMode private_mode) const {
2651 OverpassLookupMode overpass_local_mode = overpass_mode;
2652 const InstanceKlass* klass = this;
2653 while (klass != nullptr) {
2654 Method* const method = klass->find_method_impl(name,
2655 signature,
2656 overpass_local_mode,
2657 StaticLookupMode::find,
2658 private_mode);
2659 if (method != nullptr) {
2660 return method;
2661 }
2662 if (name == vmSymbols::object_initializer_name()) {
2663 break; // <init> is never inherited
2664 }
2665 klass = klass->super();
2666 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
2667 }
2668 return nullptr;
2669 }
2670
#ifdef ASSERT
// search through class hierarchy and return true if this class or
// one of the superclasses was redefined
bool InstanceKlass::has_redefined_this_or_super() const {
  for (const InstanceKlass* k = this; k != nullptr; k = k->super()) {
    if (k->has_been_redefined()) {
      return true;
    }
  }
  return false;
}
#endif
2685
2686 // lookup a method in the default methods list then in all transitive interfaces
2687 // Do NOT return private or static methods
2688 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2689 Symbol* signature) const {
2690 Method* m = nullptr;
2691 if (default_methods() != nullptr) {
2692 m = find_method(default_methods(), name, signature);
2693 }
2694 // Look up interfaces
2695 if (m == nullptr) {
2696 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2697 }
2698 return m;
2699 }
2700
2701 // lookup a method in all the interfaces that this class implements
2702 // Do NOT return private or static methods, new in JDK8 which are not externally visible
2703 // They should only be found in the initial InterfaceMethodRef
2704 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2705 Symbol* signature,
2706 DefaultsLookupMode defaults_mode) const {
2707 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2708 int num_ifs = all_ifs->length();
2709 InstanceKlass *ik = nullptr;
2710 for (int i = 0; i < num_ifs; i++) {
2711 ik = all_ifs->at(i);
2712 Method* m = ik->lookup_method(name, signature);
2713 if (m != nullptr && m->is_public() && !m->is_static() &&
2714 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2715 return m;
2716 }
2717 }
2718 return nullptr;
2719 }
2720
// Prints the column header row once; the per-klass rows are emitted by
// do_klass(). Column widths here must stay in sync with do_klass().
PrintClassClosure::PrintClassClosure(outputStream* st, bool verbose)
  :_st(st), _verbose(verbose) {
  ResourceMark rm;
  _st->print("%-18s  ", "KlassAddr");
  _st->print("%-4s ", "Size");
  _st->print("%-20s ", "State");
  _st->print("%-7s ", "Flags");
  _st->print("%-5s ", "ClassName");
  _st->cr();
}
2731
2732 void PrintClassClosure::do_klass(Klass* k) {
2733 ResourceMark rm;
2734 // klass pointer
2735 _st->print(PTR_FORMAT " ", p2i(k));
2736 // klass size
2737 _st->print("%4d ", k->size());
2738 // initialization state
2739 if (k->is_instance_klass()) {
2740 _st->print("%-20s ",InstanceKlass::cast(k)->init_state_name());
2741 } else {
2742 _st->print("%-20s ","");
2743 }
2744 // misc flags(Changes should synced with ClassesDCmd::ClassesDCmd help doc)
2745 char buf[10];
2746 int i = 0;
2747 if (k->has_finalizer()) buf[i++] = 'F';
2748 if (k->is_instance_klass()) {
2749 InstanceKlass* ik = InstanceKlass::cast(k);
2750 if (ik->has_final_method()) buf[i++] = 'f';
2751 if (ik->is_rewritten()) buf[i++] = 'W';
2752 if (ik->is_contended()) buf[i++] = 'C';
2753 if (ik->has_been_redefined()) buf[i++] = 'R';
2754 if (ik->in_aot_cache()) buf[i++] = 'S';
2755 }
2756 buf[i++] = '\0';
2757 _st->print("%-7s ", buf);
2758 // klass name
2759 _st->print("%-5s ", k->external_name());
2760 // end
2761 _st->cr();
2762 if (_verbose) {
2763 k->print_on(_st);
2764 }
2765 }
2766
2767 /* jni_id_for for jfieldIds only */
2768 JNIid* InstanceKlass::jni_id_for(int offset) {
2769 MutexLocker ml(JfieldIdCreation_lock);
2770 JNIid* probe = jni_ids() == nullptr ? nullptr : jni_ids()->find(offset);
2771 if (probe == nullptr) {
2772 // Allocate new static field identifier
2773 probe = new JNIid(this, offset, jni_ids());
2774 set_jni_ids(probe);
2775 }
2776 return probe;
2777 }
2778
2779 u2 InstanceKlass::enclosing_method_data(int offset) const {
2780 const Array<jushort>* const inner_class_list = inner_classes();
2781 if (inner_class_list == nullptr) {
2782 return 0;
2783 }
2784 const int length = inner_class_list->length();
2785 if (length % inner_class_next_offset == 0) {
2786 return 0;
2787 }
2788 const int index = length - enclosing_method_attribute_size;
2789 assert(offset < enclosing_method_attribute_size, "invalid offset");
2790 return inner_class_list->at(index + offset);
2791 }
2792
2793 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2794 u2 method_index) {
2795 Array<jushort>* inner_class_list = inner_classes();
2796 assert (inner_class_list != nullptr, "_inner_classes list is not set up");
2797 int length = inner_class_list->length();
2798 if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2799 int index = length - enclosing_method_attribute_size;
2800 inner_class_list->at_put(
2801 index + enclosing_method_class_index_offset, class_index);
2802 inner_class_list->at_put(
2803 index + enclosing_method_method_index_offset, method_index);
2804 }
2805 }
2806
// Create a jmethodID for 'method' and publish it into slot idnum of the
// given cache. The release store pairs with the acquire loads in
// get_jmethod_id()/make_methods_jmethod_ids() so concurrent readers never
// observe a partially-constructed id.
jmethodID InstanceKlass::update_jmethod_id(jmethodID* jmeths, Method* method, int idnum) {
  if (method->is_old() && !method->is_obsolete()) {
    // If the method passed in is old (but not obsolete), use the current version.
    method = method_with_idnum((int)idnum);
    assert(method != nullptr, "old and but not obsolete, so should exist");
  }
  jmethodID new_id = Method::make_jmethod_id(class_loader_data(), method);
  // Slot 0 holds the cache size, so id for idnum lives at idnum + 1.
  AtomicAccess::release_store(&jmeths[idnum + 1], new_id);
  return new_id;
}
2817
2818 // Allocate the jmethodID cache.
2819 static jmethodID* create_jmethod_id_cache(size_t size) {
2820 jmethodID* jmeths = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass);
2821 memset(jmeths, 0, (size + 1) * sizeof(jmethodID));
2822 // cache size is stored in element[0], other elements offset by one
2823 jmeths[0] = (jmethodID)size;
2824 return jmeths;
2825 }
2826
// When reading outside a lock, use this.
// The acquire pairs with release_set_methods_jmethod_ids(): a reader that
// sees the cache pointer also sees the fully-initialized cache contents.
jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
  return AtomicAccess::load_acquire(&_methods_jmethod_ids);
}
2831
// Publish a (fully-initialized) jmethodID cache. The release store pairs
// with methods_jmethod_ids_acquire() so lock-free readers are safe.
void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
  AtomicAccess::release_store(&_methods_jmethod_ids, jmeths);
}
2835
// Lookup or create a jmethodID.
jmethodID InstanceKlass::get_jmethod_id(Method* method) {
  int idnum = method->method_idnum();
  jmethodID* jmeths = methods_jmethod_ids_acquire();

  // We use a double-check locking idiom here because this cache is
  // performance sensitive. In the normal system, this cache only
  // transitions from null to non-null which is safe because we use
  // release_set_methods_jmethod_ids() to advertise the new cache.
  // A partially constructed cache should never be seen by a racing
  // thread. We also use release_store() to save a new jmethodID
  // in the cache so a partially constructed jmethodID should never be
  // seen either. Cache reads of existing jmethodIDs proceed without a
  // lock, but cache writes of a new jmethodID requires uniqueness and
  // creation of the cache itself requires no leaks so a lock is
  // acquired in those two cases.
  //
  // If the RedefineClasses() API has been used, then this cache grows
  // in the redefinition safepoint.

  if (jmeths == nullptr) {
    // Slow path 1: no cache yet; create it under the lock.
    MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
    jmeths = _methods_jmethod_ids;   // re-read under the lock
    // Still null?
    if (jmeths == nullptr) {
      size_t size = idnum_allocated_count();
      assert(size > (size_t)idnum, "should already have space");
      jmeths = create_jmethod_id_cache(size);
      jmethodID new_id = update_jmethod_id(jmeths, method, idnum);

      // publish jmeths
      release_set_methods_jmethod_ids(jmeths);
      return new_id;
    }
  }

  // Fast path: lock-free read of an already-created id.
  jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
  if (id == nullptr) {
    // Slow path 2: id not created yet; create it under the lock.
    MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
    id = jmeths[idnum + 1];          // re-read under the lock
    // Still null?
    if (id == nullptr) {
      return update_jmethod_id(jmeths, method, idnum);
    }
  }
  return id;
}
2883
// Grow the jmethodID cache, at a safepoint, so it can hold ids for all
// currently-allocated method idnums (e.g. after RedefineClasses added
// methods). No-op when there is no cache or it is already big enough.
void InstanceKlass::update_methods_jmethod_cache() {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  jmethodID* cache = _methods_jmethod_ids;
  if (cache != nullptr) {
    size_t size = idnum_allocated_count();
    size_t old_size = (size_t)cache[0];   // capacity is stored in element [0]
    if (old_size < size + 1) {
      // Allocate a larger one and copy entries to the new one.
      // They've already been updated to point to new methods where applicable (i.e., not obsolete).
      jmethodID* new_cache = create_jmethod_id_cache(size);

      // Copy the existing ids; slots [1 .. old_size] are in use.
      for (int i = 1; i <= (int)old_size; i++) {
        new_cache[i] = cache[i];
      }
      // Plain store is sufficient: we are at a safepoint, so there are no
      // concurrent readers of this field.
      _methods_jmethod_ids = new_cache;
      FREE_C_HEAP_ARRAY(cache);
    }
  }
}
2903
// Make a jmethodID for all methods in this class. This makes getting all method
// ids much, much faster with classes with more than 8
// methods, and has a *substantial* effect on performance with jvmti
// code that loads all jmethodIDs for all classes.
void InstanceKlass::make_methods_jmethod_ids() {
  // Creation of the cache and of new ids is serialized by this lock.
  MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
  jmethodID* jmeths = _methods_jmethod_ids;
  if (jmeths == nullptr) {
    jmeths = create_jmethod_id_cache(idnum_allocated_count());
    release_set_methods_jmethod_ids(jmeths);
  }

  int length = methods()->length();
  for (int index = 0; index < length; index++) {
    Method* m = methods()->at(index);
    int idnum = m->method_idnum();
    assert(!m->is_old(), "should not have old methods or I'm confused");
    jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
    if (!m->is_overpass() && // skip overpasses
        id == nullptr) {
      id = Method::make_jmethod_id(class_loader_data(), m);
      // Release store so lock-free readers never see a half-built id.
      AtomicAccess::release_store(&jmeths[idnum + 1], id);
    }
  }
}
2929
2930 // Lookup a jmethodID, null if not found. Do no blocking, no allocations, no handles
2931 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2932 int idnum = method->method_idnum();
2933 jmethodID* jmeths = methods_jmethod_ids_acquire();
2934 return (jmeths != nullptr) ? jmeths[idnum + 1] : nullptr;
2935 }
2936
2937 inline DependencyContext InstanceKlass::dependencies() {
2938 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2939 return dep_context;
2940 }
2941
// Mark for deoptimization all nmethods whose recorded dependencies on this
// klass are invalidated by the given hierarchy change.
void InstanceKlass::mark_dependent_nmethods(DeoptimizationScope* deopt_scope, KlassDepChange& changes) {
  dependencies().mark_dependent_nmethods(deopt_scope, changes);
}
2945
// Register nm as depending on this klass. Caller must hold CodeCache_lock.
void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
  assert_lock_strong(CodeCache_lock);
  dependencies().add_dependent_nmethod(nm);
}
2950
// Purge dependency entries whose nmethods are being unloaded.
void InstanceKlass::clean_dependency_context() {
  dependencies().clean_unloading_dependents();
}
2954
#ifndef PRODUCT
// Debug-only: print the nmethods that depend on this klass.
void InstanceKlass::print_dependent_nmethods(bool verbose) {
  dependencies().print_dependent_nmethods(verbose);
}

// Debug-only: true if nm is recorded in this klass' dependency context.
bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
  return dependencies().is_dependent_nmethod(nm);
}
#endif //PRODUCT
2964
// Clean the weak links this klass holds: drop an implementor whose loader
// is no longer alive, and clean each method's MethodData.
void InstanceKlass::clean_weak_instanceklass_links() {
  clean_implementors_list();
  clean_method_data();
}
2969
// For interfaces: null out the cached implementor if its class loader has
// died. Uses a lock-free acquire/cmpxchg loop because concurrent class
// loading may be installing a new implementor at the same time.
void InstanceKlass::clean_implementors_list() {
  assert(is_loader_alive(), "this klass should be live");
  if (is_interface()) {
    assert (ClassUnloading, "only called for ClassUnloading");
    for (;;) {
      // Use load_acquire due to competing with inserts
      InstanceKlass* volatile* iklass = adr_implementor();
      assert(iklass != nullptr, "Klass must not be null");
      InstanceKlass* impl = AtomicAccess::load_acquire(iklass);
      if (impl != nullptr && !impl->is_loader_alive()) {
        // null this field, might be an unloaded instance klass or null
        if (AtomicAccess::cmpxchg(iklass, impl, (InstanceKlass*)nullptr) == impl) {
          // Successfully unlinking implementor.
          if (log_is_enabled(Trace, class, unload)) {
            ResourceMark rm;
            log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
          }
          return;
        }
        // cmpxchg lost the race: retry with the freshly-installed value.
      } else {
        // Implementor is null or still alive: nothing to clean.
        return;
      }
    }
  }
}
2995
2996 void InstanceKlass::clean_method_data() {
2997 for (int m = 0; m < methods()->length(); m++) {
2998 MethodData* mdo = methods()->at(m)->method_data();
2999 if (mdo != nullptr) {
3000 mdo->clean_method_data(/*always_clean*/false);
3001 }
3002 }
3003 }
3004
// Visit every embedded metaspace pointer of this klass with the closure.
// Used by CDS/AOT archiving; fields pushed with _writable must remain
// mutable in the archive because they can be updated at runtime.
void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
  Klass::metaspace_pointers_do(it);

  if (log_is_enabled(Trace, aot)) {
    ResourceMark rm;
    log_trace(aot)("Iter(InstanceKlass): %p (%s)", this, external_name());
  }

  it->push(&_annotations);
  it->push((Klass**)&_array_klasses);
  if (!is_rewritten()) {
    // Not yet rewritten: the constant pool may still be modified.
    it->push(&_constants, MetaspaceClosure::_writable);
  } else {
    it->push(&_constants);
  }
  it->push(&_inner_classes);
#if INCLUDE_JVMTI
  it->push(&_previous_versions);
#endif
#if INCLUDE_CDS
  // For "old" classes with methods containing the jsr bytecode, the _methods array will
  // be rewritten during runtime (see Rewriter::rewrite_jsrs()) but they cannot be safely
  // checked here with ByteCodeStream. All methods that can't be verified are made writable.
  // The length check on the _methods is necessary because classes which don't have any
  // methods share the Universe::_the_empty_method_array which is in the RO region.
  if (_methods != nullptr && _methods->length() > 0 && !can_be_verified_at_dumptime()) {
    // To handle jsr bytecode, new Method* maybe stored into _methods
    it->push(&_methods, MetaspaceClosure::_writable);
  } else {
#endif
    it->push(&_methods);
#if INCLUDE_CDS
  }
#endif
  it->push(&_default_methods);
  it->push(&_local_interfaces);
  it->push(&_transitive_interfaces);
  it->push(&_method_ordering);
  if (!is_rewritten()) {
    // Rewriting may still fill in the default vtable indices.
    it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
  } else {
    it->push(&_default_vtable_indices);
  }

  it->push(&_fieldinfo_stream);
  it->push(&_fieldinfo_search_table);
  // _fields_status might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
  it->push(&_fields_status, MetaspaceClosure::_writable);

  if (itable_length() > 0) {
    itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
    int method_table_offset_in_words = ioe->offset()/wordSize;
    int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);

    // Number of interface entries = (offset table size) / (entry size).
    int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
                         / itableOffsetEntry::size();

    // Visit each interface klass and every itable method slot for it.
    for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
      if (ioe->interface_klass() != nullptr) {
        it->push(ioe->interface_klass_addr());
        itableMethodEntry* ime = ioe->first_method_entry(this);
        int n = klassItable::method_count_for_interface(ioe->interface_klass());
        for (int index = 0; index < n; index ++) {
          it->push(ime[index].method_addr());
        }
      }
    }
  }

  it->push(&_nest_host);
  it->push(&_nest_members);
  it->push(&_permitted_subclasses);
  it->push(&_loadable_descriptors);
  it->push(&_acmp_maps_array, MetaspaceClosure::_writable);
  it->push(&_record_components);
  it->push(&_inline_layout_info_array, MetaspaceClosure::_writable);

  if (CDSConfig::is_dumping_full_module_graph() && !defined_by_other_loaders()) {
    it->push(&_package_entry);
  }
}
3086
3087 #if INCLUDE_CDS
// Strip all runtime-only state from this klass so it can be written into
// the CDS/AOT archive. restore_unshareable_info() reverses this at runtime.
// Must run at a safepoint (see the assert around init_implementor()).
void InstanceKlass::remove_unshareable_info() {

  if (is_linked()) {
    assert(can_be_verified_at_dumptime(), "must be");
    // Remember this so we can avoid walking the hierarchy at runtime.
    set_verified_at_dump_time();
  }

  _misc_flags.set_has_init_deps_processed(false);

  Klass::remove_unshareable_info();

  if (SystemDictionaryShared::has_class_failed_verification(this)) {
    // Classes are attempted to link during dumping and may fail,
    // but these classes are still in the dictionary and class list in CLD.
    // If the class has failed verification, there is nothing else to remove.
    return;
  }

  // Reset to the 'allocated' state to prevent any premature accessing to
  // a shared class at runtime while the class is still being loaded and
  // restored. A class' init_state is set to 'loaded' at runtime when it's
  // being added to class hierarchy (see InstanceKlass:::add_to_hierarchy()).
  _init_state = allocated;

  { // Otherwise this needs to take out the Compile_lock.
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    init_implementor();
  }

  // Call remove_unshareable_info() on other objects that belong to this class, except
  // for constants()->remove_unshareable_info(), which is called in a separate pass in
  // ArchiveBuilder::make_klasses_shareable(),

  for (int i = 0; i < methods()->length(); i++) {
    Method* m = methods()->at(i);
    m->remove_unshareable_info();
  }

  // do array classes also.
  if (array_klasses() != nullptr) {
    array_klasses()->remove_unshareable_info();
  }

  // These are not allocated from metaspace. They are safe to set to nullptr.
  _source_debug_extension = nullptr;
  _dep_context = nullptr;
  _osr_nmethods_head = nullptr;
#if INCLUDE_JVMTI
  _breakpoints = nullptr;
  _previous_versions = nullptr;
  _cached_class_file = nullptr;
  _jvmti_cached_class_field_map = nullptr;
#endif

  _init_thread = nullptr;
  _methods_jmethod_ids = nullptr;
  _jni_ids = nullptr;
  _oop_map_cache = nullptr;
  if (CDSConfig::is_dumping_method_handles() && HeapShared::is_lambda_proxy_klass(this)) {
    // keep _nest_host
  } else {
    // clear _nest_host to ensure re-load at runtime
    _nest_host = nullptr;
  }
  init_shared_package_entry();
  _dep_context_last_cleaned = 0;
  DEBUG_ONLY(_shared_class_load_count = 0);

  remove_unshareable_flags();

  DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
}
3161
// Reset per-klass flags that must not appear in the archived version.
// Called from remove_unshareable_info() during CDS/AOT dumping.
void InstanceKlass::remove_unshareable_flags() {
  // clear all the flags/stats that shouldn't be in the archived version
  assert(!is_scratch_class(), "must be");
  assert(!has_been_redefined(), "must be");
#if INCLUDE_JVMTI
  set_is_being_redefined(false);
#endif
  set_has_resolved_methods(false);
}
3171
// Clear the cached java mirror of this klass, and of its array classes,
// for CDS/AOT dumping (mirrors are reconstituted at runtime).
void InstanceKlass::remove_java_mirror() {
  Klass::remove_java_mirror();

  // do array classes also.
  if (array_klasses() != nullptr) {
    array_klasses()->remove_java_mirror();
  }
}
3180
3181 void InstanceKlass::init_shared_package_entry() {
3182 assert(CDSConfig::is_dumping_archive(), "must be");
3183 if (!CDSConfig::is_dumping_full_module_graph() || defined_by_other_loaders()) {
3184 _package_entry = nullptr;
3185 }
3186 }
3187
3188 void InstanceKlass::compute_has_loops_flag_for_methods() {
3189 Array<Method*>* methods = this->methods();
3190 for (int index = 0; index < methods->length(); ++index) {
3191 Method* m = methods->at(index);
3192 if (!m->is_overpass()) { // work around JDK-8305771
3193 m->compute_has_loops_flag();
3194 }
3195 }
3196 }
3197
// Re-create the runtime-only state that remove_unshareable_info() stripped
// when this klass was archived: package, mirror, method state, constant
// pool resolved references, and (recursively) array classes.
// CHECK/TRAPS: may throw, e.g. on allocation failure.
void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
                                             PackageEntry* pkg_entry, TRAPS) {
  // InstanceKlass::add_to_hierarchy() sets the init_state to loaded
  // before the InstanceKlass is added to the SystemDictionary. Make
  // sure the current state is <loaded.
  assert(!is_loaded(), "invalid init state");
  assert(!shared_loading_failed(), "Must not try to load failed class again");
  set_package(loader_data, pkg_entry, CHECK);
  Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);

  if (is_inline_klass()) {
    InlineKlass::cast(this)->initialize_calling_convention(CHECK);
  }

  Array<Method*>* methods = this->methods();
  int num_methods = methods->length();
  for (int index = 0; index < num_methods; ++index) {
    methods->at(index)->restore_unshareable_info(CHECK);
  }
#if INCLUDE_JVMTI
  if (JvmtiExport::has_redefined_a_class()) {
    // Reinitialize vtable because RedefineClasses may have changed some
    // entries in this vtable for super classes so the CDS vtable might
    // point to old or obsolete entries. RedefineClasses doesn't fix up
    // vtables in the shared system dictionary, only the main one.
    // It also redefines the itable too so fix that too.
    // First fix any default methods that point to a super class that may
    // have been redefined.
    bool trace_name_printed = false;
    adjust_default_methods(&trace_name_printed);
    if (verified_at_dump_time()) {
      // Initialize vtable and itable for classes which can be verified at dump time.
      // Unlinked classes such as old classes with major version < 50 cannot be verified
      // at dump time.
      vtable().initialize_vtable();
      itable().initialize_itable();
    }
  }
#endif // INCLUDE_JVMTI

  // restore constant pool resolved references
  constants()->restore_unshareable_info(CHECK);

  // Restore acmp_maps java array from the version stored in metadata.
  // if it cannot be found in the archive
  if (Arguments::is_valhalla_enabled() && has_acmp_maps_offset() && java_mirror()->obj_field(_acmp_maps_offset) == nullptr) {
    int acmp_maps_size = _acmp_maps_array->length();
    typeArrayOop map = oopFactory::new_intArray(acmp_maps_size, CHECK);
    typeArrayHandle map_h(THREAD, map);
    // Copy each metadata entry into the freshly allocated int[].
    for (int i = 0; i < acmp_maps_size; i++) {
      map_h->int_at_put(i, _acmp_maps_array->at(i));
    }
    java_mirror()->obj_field_put(_acmp_maps_offset, map_h());
  }

  if (array_klasses() != nullptr) {
    // To get a consistent list of classes we need MultiArray_lock to ensure
    // array classes aren't observed while they are being restored.
    RecursiveLocker rl(MultiArray_lock, THREAD);
    assert(this == ObjArrayKlass::cast(array_klasses())->bottom_klass(), "sanity");
    // Array classes have null protection domain.
    // --> see ArrayKlass::complete_create_array_klass()
    if (class_loader_data() == nullptr) {
      ResourceMark rm(THREAD);
      log_debug(cds)(" loader_data %s ", loader_data == nullptr ? "nullptr" : "non null");
      log_debug(cds)(" this %s array_klasses %s ", this->name()->as_C_string(), array_klasses()->name()->as_C_string());
    }
    assert(!array_klasses()->is_refined_objArray_klass(), "must be non-refined objarrayklass");
    array_klasses()->restore_unshareable_info(class_loader_data(), Handle(), CHECK);
  }

  // Initialize @ValueBased class annotation if not already set in the archived klass.
  if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation() && !is_value_based()) {
    set_is_value_based();
  }

  DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
}
3276
3277 bool InstanceKlass::can_be_verified_at_dumptime() const {
3278 if (CDSConfig::is_dumping_dynamic_archive() && AOTMetaspace::in_aot_cache(this)) {
3279 // This is a class that was dumped into the base archive, so we know
3280 // it was verified at dump time.
3281 return true;
3282 }
3283
3284 if (CDSConfig::is_preserving_verification_constraints()) {
3285 return true;
3286 }
3287
3288 if (CDSConfig::is_old_class_for_verifier(this)) {
3289 // The old verifier does not save verification constraints, so at run time
3290 // SystemDictionaryShared::check_verification_constraints() will not work for this class.
3291 return false;
3292 }
3293 if (super() != nullptr && !super()->can_be_verified_at_dumptime()) {
3294 return false;
3295 }
3296 Array<InstanceKlass*>* interfaces = local_interfaces();
3297 int len = interfaces->length();
3298 for (int i = 0; i < len; i++) {
3299 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
3300 return false;
3301 }
3302 }
3303 return true;
3304 }
3305
3306 #endif // INCLUDE_CDS
3307
#if INCLUDE_JVMTI
// Adapter so methods_do() can clear JVMTI breakpoints on each method.
static void clear_all_breakpoints(Method* m) {
  m->clear_all_breakpoints();
}
#endif
3313
// Clean up VM-global state for a klass that is being unloaded: release
// nmethod dependencies, notify JVMTI / ClassLoadingService / JFR, and
// detach CDS bookkeeping. Scratch classes (from RedefineClasses) were
// never "loaded" and only need their dependency invariant checked.
void InstanceKlass::unload_class(InstanceKlass* ik) {

  if (ik->is_scratch_class()) {
    assert(ik->dependencies().is_empty(), "dependencies should be empty for scratch classes");
    return;
  }
  assert(ik->is_loaded(), "class should be loaded " PTR_FORMAT, p2i(ik));

  // Release dependencies.
  ik->dependencies().remove_all_dependents();

  // notify the debugger
  if (JvmtiExport::should_post_class_unload()) {
    JvmtiExport::post_class_unload(ik);
  }

  // notify ClassLoadingService of class unload
  ClassLoadingService::notify_class_unloaded(ik);

  SystemDictionaryShared::handle_class_unloading(ik);

  if (log_is_enabled(Info, class, unload)) {
    ResourceMark rm;
    log_info(class, unload)("unloading class %s " PTR_FORMAT, ik->external_name(), p2i(ik));
  }

  Events::log_class_unloading(Thread::current(), ik);

#if INCLUDE_JFR
  assert(ik != nullptr, "invariant");
  EventClassUnload event;
  event.set_unloadedClass(ik);
  event.set_definingClassLoader(ik->class_loader_data());
  event.commit();
#endif
}
3350
// Adapter with the signature required by methods_do(): releases the C-heap
// side structures owned by the given method.
static void method_release_C_heap_structures(Method* m) {
  m->release_C_heap_structures();
}
3354
// Called also by InstanceKlass::deallocate_contents, with false for release_sub_metadata.
// Frees all C-heap side structures owned by this klass: the oop map cache,
// JNI field ids, the jmethodID array, JVMTI breakpoints and the cached class
// file, and the source debug extension.  When release_sub_metadata is true,
// also releases the C-heap structures of the methods and the constant pool.
void InstanceKlass::release_C_heap_structures(bool release_sub_metadata) {
  // Clean up C heap
  Klass::release_C_heap_structures();

  // Deallocate and call destructors for MDO mutexes
  if (release_sub_metadata) {
    methods_do(method_release_C_heap_structures);
  }

  // Deallocate oop map cache
  if (_oop_map_cache != nullptr) {
    delete _oop_map_cache;
    _oop_map_cache = nullptr;
  }

  // Deallocate JNI identifiers for jfieldIDs
  JNIid::deallocate(jni_ids());
  set_jni_ids(nullptr);

  // Clear the field first (release_set_* suggests a release store for
  // concurrent readers — presumably JNI lookups; confirm at callers),
  // then free the old array.
  jmethodID* jmeths = _methods_jmethod_ids;
  if (jmeths != nullptr) {
    release_set_methods_jmethod_ids(nullptr);
    FreeHeap(jmeths);
  }

  assert(_dep_context == nullptr,
         "dependencies should already be cleaned");

#if INCLUDE_JVMTI
  // Deallocate breakpoint records
  if (breakpoints() != nullptr) {
    methods_do(clear_all_breakpoints);
    assert(breakpoints() == nullptr, "should have cleared breakpoints");
  }

  // deallocate the cached class file
  if (_cached_class_file != nullptr) {
    os::free(_cached_class_file);
    _cached_class_file = nullptr;
  }
#endif

  FREE_C_HEAP_ARRAY(_source_debug_extension);

  if (release_sub_metadata) {
    constants()->release_C_heap_structures();
  }
}
3404
// The constant pool is on stack if any of the methods are executing or
// referenced by handles.
// NOTE(review): presumably consulted during metadata cleanup to keep
// in-use metadata alive — confirm at callers.
bool InstanceKlass::on_stack() const {
  return _constants->on_stack();
}
3410
// SourceFile attribute — delegated to the constant pool, which holds the
// backing storage for these per-class attributes.
Symbol* InstanceKlass::source_file_name() const { return _constants->source_file_name(); }
u2 InstanceKlass::source_file_name_index() const { return _constants->source_file_name_index(); }
void InstanceKlass::set_source_file_name_index(u2 sourcefile_index) { _constants->set_source_file_name_index(sourcefile_index); }

// minor and major version numbers of class file
u2 InstanceKlass::minor_version() const { return _constants->minor_version(); }
void InstanceKlass::set_minor_version(u2 minor_version) { _constants->set_minor_version(minor_version); }
u2 InstanceKlass::major_version() const { return _constants->major_version(); }
void InstanceKlass::set_major_version(u2 major_version) { _constants->set_major_version(major_version); }
3420
// Returns true if this class file's version permits inline (value) types:
// a sufficiently recent major version combined with the preview-features
// minor version marker.
bool InstanceKlass::supports_inline_types() const {
  return major_version() >= Verifier::VALUE_TYPES_MAJOR_VERSION && minor_version() == Verifier::JAVA_PREVIEW_MINOR_VERSION;
}
3424
3425 const InstanceKlass* InstanceKlass::get_klass_version(int version) const {
3426 for (const InstanceKlass* ik = this; ik != nullptr; ik = ik->previous_versions()) {
3427 if (ik->constants()->version() == version) {
3428 return ik;
3429 }
3430 }
3431 return nullptr;
3432 }
3433
3434 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
3435 if (array == nullptr) {
3436 _source_debug_extension = nullptr;
3437 } else {
3438 // Adding one to the attribute length in order to store a null terminator
3439 // character could cause an overflow because the attribute length is
3440 // already coded with an u4 in the classfile, but in practice, it's
3441 // unlikely to happen.
3442 assert((length+1) > length, "Overflow checking");
3443 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
3444 for (int i = 0; i < length; i++) {
3445 sde[i] = array[i];
3446 }
3447 sde[length] = '\0';
3448 _source_debug_extension = sde;
3449 }
3450 }
3451
// Signature attribute (generic signature) — delegated to the constant pool.
Symbol* InstanceKlass::generic_signature() const { return _constants->generic_signature(); }
u2 InstanceKlass::generic_signature_index() const { return _constants->generic_signature_index(); }
void InstanceKlass::set_generic_signature_index(u2 sig_index) { _constants->set_generic_signature_index(sig_index); }
3455
// Returns the resource-allocated JVM type signature of this class,
// e.g. "Ljava/lang/String;", using the 'L' carrier.
const char* InstanceKlass::signature_name() const {
  return signature_name_of_carrier(JVM_SIGNATURE_CLASS);
}
3459
3460 const char* InstanceKlass::signature_name_of_carrier(char c) const {
3461 // Get the internal name as a c string
3462 const char* src = (const char*) (name()->as_C_string());
3463 const int src_length = (int)strlen(src);
3464
3465 char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
3466
3467 // Add L or Q as type indicator
3468 int dest_index = 0;
3469 dest[dest_index++] = c;
3470
3471 // Add the actual class name
3472 for (int src_index = 0; src_index < src_length; ) {
3473 dest[dest_index++] = src[src_index++];
3474 }
3475
3476 if (is_hidden()) { // Replace the last '+' with a '.'.
3477 for (int index = (int)src_length; index > 0; index--) {
3478 if (dest[index] == '+') {
3479 dest[index] = JVM_SIGNATURE_DOT;
3480 break;
3481 }
3482 }
3483 }
3484
3485 // Add the semicolon and the null
3486 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
3487 dest[dest_index] = '\0';
3488 return dest;
3489 }
3490
// Returns the ModuleEntry this class belongs to.  The common cases are the
// package's module (named package) or the loader's unnamed module; non-strong
// hidden classes in the unnamed package need special handling because their
// class-mirror-holder CLD has no unnamed module of its own.
ModuleEntry* InstanceKlass::module() const {
  if (is_hidden() &&
      in_unnamed_package() &&
      class_loader_data()->has_class_mirror_holder()) {
    // For a non-strong hidden class defined to an unnamed package,
    // its (class held) CLD will not have an unnamed module created for it.
    // Two choices to find the correct ModuleEntry:
    // 1. If hidden class is within a nest, use nest host's module
    // 2. Find the unnamed module off from the class loader
    // For now option #2 is used since a nest host is not set until
    // after the instance class is created in jvm_lookup_define_class().
    if (class_loader_data()->is_boot_class_loader_data()) {
      return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
    } else {
      // Fetch the unnamed module directly from the java.lang.ClassLoader oop.
      oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
      assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
      return java_lang_Module::module_entry(module);
    }
  }

  // Class is in a named package
  if (!in_unnamed_package()) {
    return _package_entry->module();
  }

  // Class is in an unnamed package, return its loader's unnamed module
  return class_loader_data()->unnamed_module();
}
3519
// Returns true if this class is defined in the java.base module.
bool InstanceKlass::in_javabase_module() const {
  return module()->name() == vmSymbols::java_base();
}
3523
3524 void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {
3525
3526 // ensure java/ packages only loaded by boot or platform builtin loaders
3527 // not needed for shared class since CDS does not archive prohibited classes.
3528 if (!in_aot_cache()) {
3529 check_prohibited_package(name(), loader_data, CHECK);
3530 }
3531
3532 if (in_aot_cache() && _package_entry != nullptr) {
3533 if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) {
3534 // we can use the saved package
3535 assert(AOTMetaspace::in_aot_cache(_package_entry), "must be");
3536 return;
3537 } else {
3538 _package_entry = nullptr;
3539 }
3540 }
3541
3542 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
3543 // it returns, so we need to decrement it when the current function exits.
3544 TempNewSymbol from_class_name =
3545 (pkg_entry != nullptr) ? nullptr : ClassLoader::package_from_class_name(name());
3546
3547 Symbol* pkg_name;
3548 if (pkg_entry != nullptr) {
3549 pkg_name = pkg_entry->name();
3550 } else {
3551 pkg_name = from_class_name;
3552 }
3553
3554 if (pkg_name != nullptr && loader_data != nullptr) {
3555
3556 // Find in class loader's package entry table.
3557 _package_entry = pkg_entry != nullptr ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
3558
3559 // If the package name is not found in the loader's package
3560 // entry table, it is an indication that the package has not
3561 // been defined. Consider it defined within the unnamed module.
3562 if (_package_entry == nullptr) {
3563
3564 if (!ModuleEntryTable::javabase_defined()) {
3565 // Before java.base is defined during bootstrapping, define all packages in
3566 // the java.base module. If a non-java.base package is erroneously placed
3567 // in the java.base module it will be caught later when java.base
3568 // is defined by ModuleEntryTable::verify_javabase_packages check.
3569 assert(ModuleEntryTable::javabase_moduleEntry() != nullptr, JAVA_BASE_NAME " module is null");
3570 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, ModuleEntryTable::javabase_moduleEntry());
3571 } else {
3572 assert(loader_data->unnamed_module() != nullptr, "unnamed module is null");
3573 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, loader_data->unnamed_module());
3574 }
3575
3576 // A package should have been successfully created
3577 DEBUG_ONLY(ResourceMark rm(THREAD));
3578 assert(_package_entry != nullptr, "Package entry for class %s not found, loader %s",
3579 name()->as_C_string(), loader_data->loader_name_and_id());
3580 }
3581
3582 if (log_is_enabled(Debug, module)) {
3583 ResourceMark rm(THREAD);
3584 ModuleEntry* m = _package_entry->module();
3585 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
3586 external_name(),
3587 pkg_name->as_C_string(),
3588 loader_data->loader_name_and_id(),
3589 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
3590 }
3591 } else {
3592 ResourceMark rm(THREAD);
3593 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
3594 external_name(),
3595 (loader_data != nullptr) ? loader_data->loader_name_and_id() : "null",
3596 UNNAMED_MODULE);
3597 }
3598 }
3599
// Function set_classpath_index ensures that for a non-null _package_entry
// of the InstanceKlass, the entry is in the boot loader's package entry table.
// It then sets the classpath_index in the package entry record.
//
// The classpath_index field is used to find the entry on the boot loader class
// path for packages with classes loaded by the boot loader from -Xbootclasspath/a
// in an unnamed module.  It is also used to indicate (for all packages whose
// classes are loaded by the boot loader) that at least one of the package's
// classes has been loaded.
void InstanceKlass::set_classpath_index(s2 path_index) {
  if (_package_entry != nullptr) {
    // pkg_entry_tbl exists only in debug builds; both asserts compile away
    // in product, so no product-mode work is done here beyond the store.
    DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
    assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
    assert(path_index != -1, "Unexpected classpath_index");
    _package_entry->set_classpath_index(path_index);
  }
}
3617
3618 // different versions of is_same_class_package
3619
3620 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
3621 oop classloader1 = this->class_loader();
3622 PackageEntry* classpkg1 = this->package();
3623 if (class2->is_objArray_klass()) {
3624 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
3625 }
3626
3627 oop classloader2;
3628 PackageEntry* classpkg2;
3629 if (class2->is_instance_klass()) {
3630 classloader2 = class2->class_loader();
3631 classpkg2 = class2->package();
3632 } else {
3633 assert(class2->is_typeArray_klass(), "should be type array");
3634 classloader2 = nullptr;
3635 classpkg2 = nullptr;
3636 }
3637
3638 // Same package is determined by comparing class loader
3639 // and package entries. Both must be the same. This rule
3640 // applies even to classes that are defined in the unnamed
3641 // package, they still must have the same class loader.
3642 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
3643 return true;
3644 }
3645
3646 return false;
3647 }
3648
// return true if this class and other_class are in the same package. Classloader
// and classname information is enough to determine a class's package
bool InstanceKlass::is_same_class_package(oop other_class_loader,
                                          const Symbol* other_class_name) const {
  // Different loaders => different runtime packages, regardless of name.
  if (class_loader() != other_class_loader) {
    return false;
  }
  // Same loader and identical class name: trivially the same package.
  if (name()->fast_compare(other_class_name) == 0) {
    return true;
  }

  {
    ResourceMark rm;

    bool bad_class_name = false;
    TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
    if (bad_class_name) {
      return false;
    }
    // Check that package_from_class_name() returns null, not "", if there is no package.
    assert(other_pkg == nullptr || other_pkg->utf8_length() > 0, "package name is empty string");

    const Symbol* const this_package_name =
        this->package() != nullptr ? this->package()->name() : nullptr;

    if (this_package_name == nullptr || other_pkg == nullptr) {
      // One of the two doesn't have a package.  Only return true if the other
      // one also doesn't have a package.
      return this_package_name == other_pkg;
    }

    // Check if package is identical
    return this_package_name->fast_compare(other_pkg) == 0;
  }
}
3684
3685 static bool is_prohibited_package_slow(Symbol* class_name) {
3686 // Caller has ResourceMark
3687 int length;
3688 jchar* unicode = class_name->as_unicode(length);
3689 return (length >= 5 &&
3690 unicode[0] == 'j' &&
3691 unicode[1] == 'a' &&
3692 unicode[2] == 'v' &&
3693 unicode[3] == 'a' &&
3694 unicode[4] == '/');
3695 }
3696
3697 // Only boot and platform class loaders can define classes in "java/" packages.
3698 void InstanceKlass::check_prohibited_package(Symbol* class_name,
3699 ClassLoaderData* loader_data,
3700 TRAPS) {
3701 if (!loader_data->is_boot_class_loader_data() &&
3702 !loader_data->is_platform_class_loader_data() &&
3703 class_name != nullptr && class_name->utf8_length() >= 5) {
3704 ResourceMark rm(THREAD);
3705 bool prohibited;
3706 const u1* base = class_name->base();
3707 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
3708 prohibited = is_prohibited_package_slow(class_name);
3709 } else {
3710 char* name = class_name->as_C_string();
3711 prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
3712 }
3713 if (prohibited) {
3714 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
3715 assert(pkg_name != nullptr, "Error in parsing package name starting with 'java/'");
3716 char* name = pkg_name->as_C_string();
3717 const char* class_loader_name = loader_data->loader_name_and_id();
3718 StringUtils::replace_no_expand(name, "/", ".");
3719 const char* msg_text1 = "Class loader (instance of): ";
3720 const char* msg_text2 = " tried to load prohibited package name: ";
3721 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
3722 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
3723 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
3724 THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
3725 }
3726 }
3727 return;
3728 }
3729
// Searches this class's InnerClasses attribute for the entry that describes
// this class itself.  On success, stores the outer-class and inner-name
// constant pool indices into *ooff / *noff and returns true.  May resolve
// constant pool classes and therefore can throw (CHECK_false).
bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
  constantPoolHandle i_cp(THREAD, constants());
  for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    // A zero inner_class_info index is a valid (empty) entry; skip it.
    if (ioff != 0) {
      // Check to see if the name matches the class we're looking for
      // before attempting to find the class.
      if (i_cp->klass_name_at_matches(this, ioff)) {
        Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
        if (this == inner_klass) {
          *ooff = iter.outer_class_info_index();
          *noff = iter.inner_name_index();
          return true;
        }
      }
    }
  }
  return false;
}
3749
3750 void InstanceKlass::check_can_be_annotated_with_NullRestricted(InstanceKlass* type, Symbol* container_klass_name, TRAPS) {
3751 assert(type->is_instance_klass(), "Sanity check");
3752 if (type->is_identity_class()) {
3753 ResourceMark rm(THREAD);
3754 THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
3755 err_msg("Class %s expects class %s to be a value class, but it is an identity class",
3756 container_klass_name->as_C_string(),
3757 type->external_name()));
3758 }
3759
3760 if (type->is_abstract()) {
3761 ResourceMark rm(THREAD);
3762 THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
3763 err_msg("Class %s expects class %s to be concrete value type, but it is an abstract class",
3764 container_klass_name->as_C_string(),
3765 type->external_name()));
3766 }
3767 }
3768
// Computes the enclosing (outer) class of this class, using the InnerClasses
// attribute first and the EnclosingMethod attribute as a fallback for local
// classes.  Sets *inner_is_member to true only for a true member class.
// Returns null when there is no enclosing class; throws
// IncompatibleClassChangeError on inconsistent attributes.
InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
  InstanceKlass* outer_klass = nullptr;
  *inner_is_member = false;
  int ooff = 0, noff = 0;
  bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
  if (has_inner_classes_attr) {
    constantPoolHandle i_cp(THREAD, constants());
    if (ooff != 0) {
      Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
      if (!ok->is_instance_klass()) {
        // If the outer class is not an instance klass then it cannot have
        // declared any inner classes.
        ResourceMark rm(THREAD);
        // Names are all known to be < 64k so we know this formatted message is not excessively large.
        Exceptions::fthrow(
          THREAD_AND_LOCATION,
          vmSymbols::java_lang_IncompatibleClassChangeError(),
          "%s and %s disagree on InnerClasses attribute",
          ok->external_name(),
          external_name());
        return nullptr;
      }
      outer_klass = InstanceKlass::cast(ok);
      *inner_is_member = true;
    }
    if (nullptr == outer_klass) {
      // It may be a local class; try for that.
      int encl_method_class_idx = enclosing_method_class_index();
      if (encl_method_class_idx != 0) {
        Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
        outer_klass = InstanceKlass::cast(ok);
        *inner_is_member = false;
      }
    }
  }

  // If no inner class attribute found for this class.
  if (nullptr == outer_klass) return nullptr;

  // Throws an exception if outer klass has not declared k as an inner klass
  // We need evidence that each klass knows about the other, or else
  // the system could allow a spoof of an inner class to gain access rights.
  Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
  return outer_klass;
}
3814
// Computes the modifier flags to report for this class.  For a member class,
// the flags come from its own InnerClasses attribute entry rather than the
// class's access flags.
u2 InstanceKlass::compute_modifier_flags() const {
  u2 access = access_flags().as_unsigned_short();

  // But check if it happens to be member class.
  InnerClassesIterator iter(this);
  for (; !iter.done(); iter.next()) {
    int ioff = iter.inner_class_info_index();
    // Inner class attribute can be zero, skip it.
    // Strange but true:  JVM spec. allows null inner class refs.
    if (ioff == 0) continue;

    // only look at classes that are already loaded
    // since we are looking for the flags for our self.
    Symbol* inner_name = constants()->klass_name_at(ioff);
    if (name() == inner_name) {
      // This is really a member class.
      access = iter.inner_access_flags();
      break;
    }
  }
  if (!Arguments::is_valhalla_enabled()) {
    // Remember to strip ACC_SUPER bit without Valhalla
    // (with Valhalla the bit is meaningful and must be preserved).
    access &= (~JVM_ACC_SUPER);
  }
  return access;
}
3841
3842 jint InstanceKlass::jvmti_class_status() const {
3843 jint result = 0;
3844
3845 if (is_linked()) {
3846 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3847 }
3848
3849 if (is_initialized()) {
3850 assert(is_linked(), "Class status is not consistent");
3851 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3852 }
3853 if (is_in_error_state()) {
3854 result |= JVMTI_CLASS_STATUS_ERROR;
3855 }
3856 return result;
3857 }
3858
// Resolves the method at slot 'index' of the itable section belonging to
// interface 'holder'.  Throws AbstractMethodError when the slot is empty,
// or IncompatibleClassChangeError when this class does not implement the
// interface at all.
Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
  bool implements_interface; // initialized by method_at_itable_or_null
  Method* m = method_at_itable_or_null(holder, index,
                                       implements_interface); // out parameter
  if (m != nullptr) {
    assert(implements_interface, "sanity");
    return m;
  } else if (implements_interface) {
    // Throw AbstractMethodError since corresponding itable slot is empty.
    THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
  } else {
    // If the interface isn't implemented by the receiver class,
    // the VM should throw IncompatibleClassChangeError.
    ResourceMark rm(THREAD);
    stringStream ss;
    bool same_module = (module() == holder->module());
    ss.print("Receiver class %s does not implement "
             "the interface %s defining the method to be called "
             "(%s%s%s)",
             external_name(), holder->external_name(),
             (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
             (same_module) ? "" : "; ",
             (same_module) ? "" : holder->class_in_module_of_loader());
    THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
  }
}
3885
3886 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3887 klassItable itable(this);
3888 for (int i = 0; i < itable.size_offset_table(); i++) {
3889 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3890 if (offset_entry->interface_klass() == holder) {
3891 implements_interface = true;
3892 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3893 Method* m = ime[index].method();
3894 return m;
3895 }
3896 }
3897 implements_interface = false;
3898 return nullptr; // offset entry not found
3899 }
3900
// Finds the vtable index through which the given interface method is
// dispatched in this (linked) class: first via the default-methods table,
// then via the miranda entries.  Returns Method::invalid_vtable_index when
// no vtable slot exists.
int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
  assert(is_linked(), "required");
  assert(intf_method->method_holder()->is_interface(), "not an interface method");
  assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");

  int vtable_index = Method::invalid_vtable_index;
  Symbol* name = intf_method->name();
  Symbol* signature = intf_method->signature();

  // First check in default method array
  if (!intf_method->is_abstract() && default_methods() != nullptr) {
    int index = find_method_index(default_methods(),
                                  name, signature,
                                  Klass::OverpassLookupMode::find,
                                  Klass::StaticLookupMode::find,
                                  Klass::PrivateLookupMode::find);
    if (index >= 0) {
      // default_vtable_indices() parallels default_methods().
      vtable_index = default_vtable_indices()->at(index);
    }
  }
  if (vtable_index == Method::invalid_vtable_index) {
    // get vtable_index for miranda methods
    klassVtable vt = vtable();
    vtable_index = vt.index_of_miranda(name, signature);
  }
  return vtable_index;
}
3928
3929 #if INCLUDE_JVMTI
// update default_methods for redefineclasses for methods that are
// not yet in the vtable due to concurrent subclass define and superinterface
// redefinition
// Note: those in the vtable, should have been updated via adjust_method_entries
void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
  // search the default_methods for uses of either obsolete or EMCP methods
  if (default_methods() != nullptr) {
    for (int index = 0; index < default_methods()->length(); index ++) {
      Method* old_method = default_methods()->at(index);
      if (old_method == nullptr || !old_method->is_old()) {
        continue; // skip uninteresting entries
      }
      assert(!old_method->is_deleted(), "default methods may not be deleted");
      // Replace the obsolete/EMCP method with its redefined counterpart.
      Method* new_method = old_method->get_new_method();
      default_methods()->at_put(index, new_method);

      if (log_is_enabled(Info, redefine, class, update)) {
        ResourceMark rm;
        // Print the class-level header only once per redefinition pass.
        if (!(*trace_name_printed)) {
          log_info(redefine, class, update)
            ("adjust: klassname=%s default methods from name=%s",
             external_name(), old_method->method_holder()->external_name());
          *trace_name_printed = true;
        }
        log_debug(redefine, class, update, vtables)
          ("default method update: %s(%s) ",
           new_method->name()->as_C_string(), new_method->signature()->as_C_string());
      }
    }
  }
}
3961 #endif // INCLUDE_JVMTI
3962
// On-stack replacement stuff
// Links a freshly compiled OSR nmethod at the head of this klass's OSR list,
// updates the method's highest OSR compilation level, and invalidates any
// lower-level OSR nmethods for the same bci.  Caller holds NMethodState_lock.
void InstanceKlass::add_osr_nmethod(nmethod* n) {
  assert_lock_strong(NMethodState_lock);
#ifndef PRODUCT
  nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
  assert(prev == nullptr || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
         "redundant OSR recompilation detected. memory leak in CodeCache!");
#endif
  // only one compilation can be active
  assert(n->is_osr_method(), "wrong kind of nmethod");
  n->set_osr_link(osr_nmethods_head());
  set_osr_nmethods_head(n);
  // Raise the highest osr level if necessary
  n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));

  // Get rid of the osr methods for the same bci that have lower levels.
  for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
    nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
    if (inv != nullptr && inv->is_in_use()) {
      inv->make_not_entrant(nmethod::InvalidationReason::OSR_INVALIDATION_OF_LOWER_LEVEL);
    }
  }
}
3986
// Remove osr nmethod from the list. Return true if found and removed.
// Also recomputes the method's highest remaining OSR compilation level from
// the nmethods still on the list (the scan before and after the removal
// point together cover every remaining entry for the method).
bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
  // This is a short non-blocking critical region, so the no safepoint check is ok.
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  assert(n->is_osr_method(), "wrong kind of nmethod");
  nmethod* last = nullptr;
  nmethod* cur  = osr_nmethods_head();
  int max_level = CompLevel_none;  // Find the max comp level excluding n
  Method* m = n->method();
  // Search for match
  bool found = false;
  while(cur != nullptr && cur != n) {
    if (m == cur->method()) {
      // Find max level before n
      max_level = MAX2(max_level, cur->comp_level());
    }
    last = cur;
    cur = cur->osr_link();
  }
  nmethod* next = nullptr;
  if (cur == n) {
    found = true;
    next = cur->osr_link();
    if (last == nullptr) {
      // Remove first element
      set_osr_nmethods_head(next);
    } else {
      last->set_osr_link(next);
    }
  }
  n->set_osr_link(nullptr);
  // Continue scanning past the removal point for the method's other entries.
  cur = next;
  while (cur != nullptr) {
    // Find max level after n
    if (m == cur->method()) {
      max_level = MAX2(max_level, cur->comp_level());
    }
    cur = cur->osr_link();
  }
  m->set_highest_osr_comp_level(max_level);
  return found;
}
4029
4030 int InstanceKlass::mark_osr_nmethods(DeoptimizationScope* deopt_scope, const Method* m) {
4031 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
4032 nmethod* osr = osr_nmethods_head();
4033 int found = 0;
4034 while (osr != nullptr) {
4035 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
4036 if (osr->method() == m) {
4037 deopt_scope->mark(osr);
4038 found++;
4039 }
4040 osr = osr->osr_link();
4041 }
4042 return found;
4043 }
4044
// Looks up an OSR nmethod for method m at the given bci.  With match_level,
// only an exact comp_level match is returned; otherwise the best (highest)
// available level >= comp_level is returned.  bci == InvocationEntryBci
// matches any entry bci.  Returns null when nothing suitable is found.
nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
  ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
  nmethod* osr = osr_nmethods_head();
  nmethod* best = nullptr;
  while (osr != nullptr) {
    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
    // There can be a time when a c1 osr method exists but we are waiting
    // for a c2 version. When c2 completes its osr nmethod we will trash
    // the c1 version and only be able to find the c2 version. However
    // while we overflow in the c1 code at back branches we don't want to
    // try and switch to the same code as we are already running

    if (osr->method() == m &&
        (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
      if (match_level) {
        if (osr->comp_level() == comp_level) {
          // Found a match - return it.
          return osr;
        }
      } else {
        if (best == nullptr || (osr->comp_level() > best->comp_level())) {
          if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
            // Found the best possible - return it.
            return osr;
          }
          best = osr;
        }
      }
    }
    osr = osr->osr_link();
  }

  assert(match_level == false || best == nullptr, "shouldn't pick up anything if match_level is set");
  if (best != nullptr && best->comp_level() >= comp_level) {
    return best;
  }
  return nullptr;
}
4083
4084 // -----------------------------------------------------------------------------------------------------
4085 // Printing
4086
4087 #define BULLET " - "
4088
// Human-readable names for the class initialization states; indexed by
// init_state() (see init_state_name() below).
static const char* state_names[] = {
  "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
};
4092
// Debug printer for a raw vtable/itable slot region.  Prints one slot per
// line; slots holding valid Metadata are printed symbolically.  Small
// positive values are interpreted as intra-table byte offsets relative to
// 'self' (presumably itable offset entries — confirm against klassItable
// layout): the referenced slot index is recorded in forward_refs so the
// back-reference can be annotated when that slot's line is printed.
static void print_vtable(address self, intptr_t* start, int len, outputStream* st) {
  ResourceMark rm;
  int* forward_refs = NEW_RESOURCE_ARRAY(int, len);
  for (int i = 0; i < len; i++)  forward_refs[i] = 0;
  for (int i = 0; i < len; i++) {
    intptr_t e = start[i];
    st->print("%d : " INTPTR_FORMAT, i, e);
    // Annotate slots that an earlier offset entry pointed at.
    if (forward_refs[i] != 0) {
      int from = forward_refs[i];
      int off = (int) start[from];
      st->print(" (offset %d <= [%d])", off, from);
    }
    if (MetaspaceObj::is_valid((Metadata*)e)) {
      st->print(" ");
      ((Metadata*)e)->print_value_on(st);
    } else if (self != nullptr && e > 0 && e < 0x10000) {
      // Heuristic: small positive values are offsets, not pointers.
      address location = self + e;
      int index = (int)((intptr_t*)location - start);
      st->print(" (offset %d => [%d])", (int)e, index);
      if (index >= 0 && index < len)
        forward_refs[index] = i;
    }
    st->cr();
  }
}
4118
4119 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
4120 return print_vtable(nullptr, reinterpret_cast<intptr_t*>(start), len, st);
4121 }
4122
// Returns the human-readable name of this klass's initialization state.
// Relies on init_state() being a valid index into state_names[] above.
const char* InstanceKlass::init_state_name() const {
  return state_names[init_state()];
}
4126
// Detailed (multi-line) dump of this klass: sizes, flags, hierarchy, method
// arrays, interfaces, secondary supers, annotations, previous versions,
// mirror, v/itables, fields and oop maps. Used by debug printing (e.g. -XX
// print flags and the debugger "print" commands).
void InstanceKlass::print_on(outputStream* st) const {
  assert(is_klass(), "must be klass");
  Klass::print_on(st);

  st->print(BULLET"instance size: %d", size_helper()); st->cr();
  st->print(BULLET"klass size: %d", size()); st->cr();
  st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
  st->print(BULLET"flags: "); _misc_flags.print_on(st); st->cr();
  st->print(BULLET"state: "); st->print_cr("%s", init_state_name());
  st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
  st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
  st->print(BULLET"sub: ");
  // Print at most MaxSubklassPrintSize direct subclasses; keep counting the
  // rest so the overflow message can report how many were omitted.
  Klass* sub = subklass();
  int n;
  for (n = 0; sub != nullptr; n++, sub = sub->next_sibling()) {
    if (n < MaxSubklassPrintSize) {
      sub->print_value_on(st);
      st->print(" ");
    }
  }
  if (n >= MaxSubklassPrintSize) st->print("(%zd more klasses...)", n - MaxSubklassPrintSize);
  st->cr();

  if (is_interface()) {
    st->print_cr(BULLET"nof implementors: %d", nof_implementors());
    // A concrete implementor is only printed when there is exactly one.
    if (nof_implementors() == 1) {
      st->print_cr(BULLET"implementor: ");
      st->print(" ");
      implementor()->print_value_on(st);
      st->cr();
    }
  }

  st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
  st->print(BULLET"methods: ");
  print_array_on(st, methods(), [](outputStream* ost, Method* method) {
    method->print_value_on(ost);
  });
  st->print(BULLET"method ordering: ");
  print_array_on(st, method_ordering(), [](outputStream* ost, int i) {
    ost->print("%d", i);
  });
  if (default_methods() != nullptr) {
    st->print(BULLET"default_methods: ");
    print_array_on(st, default_methods(), [](outputStream* ost, Method* method) {
      method->print_value_on(ost);
    });
  }
  print_on_maybe_null(st, BULLET"default vtable indices: ", default_vtable_indices());
  st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
  st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();

  st->print(BULLET"secondary supers: "); secondary_supers()->print_value_on(st); st->cr();

  st->print(BULLET"hash_slot: %d", hash_slot()); st->cr();
  st->print(BULLET"secondary bitmap: " UINTX_FORMAT_X_0, _secondary_supers_bitmap); st->cr();

  if (secondary_supers() != nullptr) {
    if (Verbose) {
      // A completely full bitmap means the table is not hash-organized, so
      // per-entry probe distances are meaningless and are suppressed.
      bool is_hashed = (_secondary_supers_bitmap != SECONDARY_SUPERS_BITMAP_FULL);
      st->print_cr(BULLET"---- secondary supers (%d words):", _secondary_supers->length());
      for (int i = 0; i < _secondary_supers->length(); i++) {
        ResourceMark rm; // for external_name()
        Klass* secondary_super = _secondary_supers->at(i);
        st->print(BULLET"%2d:", i);
        if (is_hashed) {
          // Distance (mod table size) of this entry from its computed home slot.
          int home_slot = compute_home_slot(secondary_super, _secondary_supers_bitmap);
          int distance = (i - home_slot) & SECONDARY_SUPERS_TABLE_MASK;
          st->print(" dist:%02d:", distance);
        }
        st->print_cr(" %p %s", secondary_super, secondary_super->external_name());
      }
    }
  }
  st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();

  print_on_maybe_null(st, BULLET"class loader data: ", class_loader_data());
  print_on_maybe_null(st, BULLET"source file: ", source_file_name());
  if (source_debug_extension() != nullptr) {
    st->print(BULLET"source debug extension: ");
    st->print("%s", source_debug_extension());
    st->cr();
  }
  print_on_maybe_null(st, BULLET"class annotations: ", class_annotations());
  print_on_maybe_null(st, BULLET"class type annotations: ", class_type_annotations());
  print_on_maybe_null(st, BULLET"field annotations: ", fields_annotations());
  print_on_maybe_null(st, BULLET"field type annotations: ", fields_type_annotations());
  {
    // Print the constant pools of all previous (redefined) versions on one line.
    bool have_pv = false;
    // previous versions are linked together through the InstanceKlass
    for (InstanceKlass* pv_node = previous_versions();
         pv_node != nullptr;
         pv_node = pv_node->previous_versions()) {
      if (!have_pv)
        st->print(BULLET"previous version: ");
      have_pv = true;
      pv_node->constants()->print_value_on(st);
    }
    if (have_pv) st->cr();
  }

  print_on_maybe_null(st, BULLET"generic signature: ", generic_signature());
  st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
  st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
  print_on_maybe_null(st, BULLET"record components: ", record_components());
  st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
  st->print(BULLET"loadable descriptors: "); loadable_descriptors()->print_value_on(st); st->cr();
  if (java_mirror() != nullptr) {
    st->print(BULLET"java mirror: ");
    java_mirror()->print_value_on(st);
    st->cr();
  } else {
    st->print_cr(BULLET"java mirror: null");
  }
  // v/itable contents are only dumped in Verbose or WizardMode.
  st->print(BULLET"vtable length %d (start addr: " PTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
  if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
  st->print(BULLET"itable length %d (start addr: " PTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
  if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(nullptr, start_of_itable(), itable_length(), st);

  InstanceKlass* ik = const_cast<InstanceKlass*>(this);
  // There is no oop so static and nonstatic printing can use the same printer.
  FieldPrinter field_printer(st);
  st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
  ik->do_local_static_fields(&field_printer);
  st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
  ik->print_nonstatic_fields(&field_printer);

  // Each oop map block covers 'count' consecutive oop fields starting at
  // 'offset'; print the inclusive byte-offset range of each block.
  st->print(BULLET"non-static oop maps (%d entries): ", nonstatic_oop_map_count());
  OopMapBlock* map = start_of_nonstatic_oop_maps();
  OopMapBlock* end_map = map + nonstatic_oop_map_count();
  while (map < end_map) {
    st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
    map++;
  }
  st->cr();

  if (fieldinfo_search_table() != nullptr) {
    st->print_cr(BULLET"---- field info search table:");
    FieldInfoStream::print_search_table(st, _constants, _fieldinfo_stream, _fieldinfo_search_table);
  }
}
4268
4269 void InstanceKlass::print_value_on(outputStream* st) const {
4270 assert(is_klass(), "must be klass");
4271 if (Verbose || WizardMode) access_flags().print_on(st);
4272 name()->print_value_on(st);
4273 }
4274
4275 void FieldPrinter::do_field(fieldDescriptor* fd) {
4276 for (int i = 0; i < _indent; i++) _st->print(" ");
4277 _st->print(BULLET);
4278 // Handles the cases of static fields or instance fields but no oop is given.
4279 if (_obj == nullptr) {
4280 fd->print_on(_st, _base_offset);
4281 _st->cr();
4282 } else {
4283 fd->print_on_for(_st, _obj, _indent, _base_offset);
4284 if (!fd->field_flags().is_flat()) _st->cr();
4285 }
4286 }
4287
4288
// Detailed dump of an instance of this klass: generic Klass header, all
// non-static fields, plus extra information for well-known classes
// (String contents, Class signature + static fields, MethodType signature).
void InstanceKlass::oop_print_on(oop obj, outputStream* st, int indent, int base_offset) {
  Klass::oop_print_on(obj, st);

  if (this == vmClasses::String_klass()) {
    // Only print the string contents when the value array looks sane:
    // non-null, actually a type array, and at least 'length' elements long.
    typeArrayOop value = java_lang_String::value(obj);
    juint length = java_lang_String::length(obj);
    if (value != nullptr &&
        value->is_typeArray() &&
        length <= (juint) value->length()) {
      st->print(BULLET"string: ");
      java_lang_String::print(obj, st);
      st->cr();
    }
  }

  st->print_cr(BULLET"---- fields (total size %zu words):", oop_size(obj));
  FieldPrinter print_field(st, obj, indent, base_offset);
  print_nonstatic_fields(&print_field);

  if (this == vmClasses::Class_klass()) {
    st->print(BULLET"signature: ");
    java_lang_Class::print_signature(obj, st);
    st->cr();
    // For mirrors of instance klasses, also dump the static fields stored
    // in the mirror object itself.
    Klass* real_klass = java_lang_Class::as_Klass(obj);
    if (real_klass != nullptr && real_klass->is_instance_klass()) {
      st->print_cr(BULLET"---- static fields (%d):", java_lang_Class::static_oop_field_count(obj));
      InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
    }
  } else if (this == vmClasses::MethodType_klass()) {
    st->print(BULLET"signature: ");
    java_lang_invoke_MethodType::print_signature(obj, st);
    st->cr();
  }
}
4323
4324 #ifndef PRODUCT
4325
// Debug-only check that 'i' is a valid itable index for this interface,
// i.e. lies in [0, method_count). Always returns true; failure asserts.
bool InstanceKlass::verify_itable_index(int i) {
  int method_count = klassItable::method_count_for_interface(this);
  assert(i >= 0 && i < method_count, "index out of bounds");
  return true;
}
4331
4332 #endif //PRODUCT
4333
// Compact one-line representation of an instance: "a <ClassName><address>",
// plus a value summary for well-known classes (String, Class, MethodType,
// boxed primitives, LambdaForm, MemberName).
void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
  st->print("a ");
  name()->print_value_on(st);
  obj->print_address_on(st);
  if (this == vmClasses::String_klass()
      && java_lang_String::value(obj) != nullptr) {
    // Strings: show the contents, abbreviated to 12 chars plus the total
    // length when 24 chars or longer.
    ResourceMark rm;
    int len = java_lang_String::length(obj);
    int plen = (len < 24 ? len : 12);
    char* str = java_lang_String::as_utf8_string(obj, 0, plen);
    st->print(" = \"%s\"", str);
    if (len > plen)
      st->print("...[%d]", len);
  } else if (this == vmClasses::Class_klass()) {
    // Mirrors: show the represented klass, or the primitive type name for
    // primitive mirrors (which have no klass).
    Klass* k = java_lang_Class::as_Klass(obj);
    st->print(" = ");
    if (k != nullptr) {
      k->print_value_on(st);
    } else {
      const char* tname = type2name(java_lang_Class::primitive_type(obj));
      st->print("%s", tname ? tname : "type?");
    }
  } else if (this == vmClasses::MethodType_klass()) {
    st->print(" = ");
    java_lang_invoke_MethodType::print_signature(obj, st);
  } else if (java_lang_boxing_object::is_instance(obj)) {
    // Boxed primitives: show the boxed value.
    st->print(" = ");
    java_lang_boxing_object::print(obj, st);
  } else if (this == vmClasses::LambdaForm_klass()) {
    oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
    if (vmentry != nullptr) {
      st->print(" => ");
      vmentry->print_value_on(st);
    }
  } else if (this == vmClasses::MemberName_klass()) {
    // MemberNames: prefer the resolved vmtarget; otherwise fall back to
    // "clazz.name" with "null" standing in for missing parts.
    Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
    if (vmtarget != nullptr) {
      st->print(" = ");
      vmtarget->print_value_on(st);
    } else {
      oop clazz = java_lang_invoke_MemberName::clazz(obj);
      oop name = java_lang_invoke_MemberName::name(obj);
      if (clazz != nullptr) {
        clazz->print_value_on(st);
      } else {
        st->print("null");
      }
      st->print(".");
      if (name != nullptr) {
        name->print_value_on(st);
      } else {
        st->print("null");
      }
    }
  }
}
4390
// For instance klasses the internal name is simply the external (Java) name.
const char* InstanceKlass::internal_name() const {
  return external_name();
}
4394
// Entry point for class-load logging: record the class in the CDS class
// list (if enabled), emit the [class,load] info/debug lines, and emit the
// load-cause stack traces (if enabled).
void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
                                             const ModuleEntry* module_entry,
                                             const ClassFileStream* cfs) const {

  if (ClassListWriter::is_enabled()) {
    ClassListWriter::write(this, cfs);
  }

  print_class_load_helper(loader_data, module_entry, cfs);
  print_class_load_cause_logging();
}
4406
4407 void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data,
4408 const ModuleEntry* module_entry,
4409 const ClassFileStream* cfs) const {
4410
4411 if (!log_is_enabled(Info, class, load)) {
4412 return;
4413 }
4414
4415 ResourceMark rm;
4416 LogMessage(class, load) msg;
4417 stringStream info_stream;
4418
4419 // Name and class hierarchy info
4420 info_stream.print("%s", external_name());
4421
4422 // Source
4423 if (cfs != nullptr) {
4424 if (cfs->source() != nullptr) {
4425 const char* module_name = (module_entry->name() == nullptr) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
4426 if (module_name != nullptr) {
4427 // When the boot loader created the stream, it didn't know the module name
4428 // yet. Let's format it now.
4429 if (cfs->from_boot_loader_modules_image()) {
4430 info_stream.print(" source: jrt:/%s", module_name);
4431 } else {
4432 info_stream.print(" source: %s", cfs->source());
4433 }
4434 } else {
4435 info_stream.print(" source: %s", cfs->source());
4436 }
4437 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
4438 Thread* current = Thread::current();
4439 Klass* caller = current->is_Java_thread() ?
4440 JavaThread::cast(current)->security_get_caller_class(1):
4441 nullptr;
4442 // caller can be null, for example, during a JVMTI VM_Init hook
4443 if (caller != nullptr) {
4444 info_stream.print(" source: instance of %s", caller->external_name());
4445 } else {
4446 // source is unknown
4447 }
4448 } else {
4449 oop class_loader = loader_data->class_loader();
4450 info_stream.print(" source: %s", class_loader->klass()->external_name());
4451 }
4452 } else {
4453 assert(this->in_aot_cache(), "must be");
4454 if (AOTMetaspace::in_aot_cache_dynamic_region((void*)this)) {
4455 info_stream.print(" source: shared objects file (top)");
4456 } else {
4457 info_stream.print(" source: shared objects file");
4458 }
4459 }
4460
4461 msg.info("%s", info_stream.as_string());
4462
4463 if (log_is_enabled(Debug, class, load)) {
4464 stringStream debug_stream;
4465
4466 // Class hierarchy info
4467 debug_stream.print(" klass: " PTR_FORMAT " super: " PTR_FORMAT,
4468 p2i(this), p2i(super()));
4469
4470 // Interfaces
4471 if (local_interfaces() != nullptr && local_interfaces()->length() > 0) {
4472 debug_stream.print(" interfaces:");
4473 int length = local_interfaces()->length();
4474 for (int i = 0; i < length; i++) {
4475 debug_stream.print(" " PTR_FORMAT,
4476 p2i(local_interfaces()->at(i)));
4477 }
4478 }
4479
4480 // Class loader
4481 debug_stream.print(" loader: [");
4482 loader_data->print_value_on(&debug_stream);
4483 debug_stream.print("]");
4484
4485 // Classfile checksum
4486 if (cfs) {
4487 debug_stream.print(" bytes: %d checksum: %08x",
4488 cfs->length(),
4489 ClassLoader::crc32(0, (const char*)cfs->buffer(),
4490 cfs->length()));
4491 }
4492
4493 msg.debug("%s", debug_stream.as_string());
4494 }
4495 }
4496
// When [class,load,cause] logging is enabled, print the Java stack (and,
// with the 'native' tag, the native stack) that triggered loading of this
// class. Filtering is controlled by LogClassLoadingCauseFor.
void InstanceKlass::print_class_load_cause_logging() const {
  bool log_cause_native = log_is_enabled(Info, class, load, cause, native);
  if (log_cause_native || log_is_enabled(Info, class, load, cause)) {
    JavaThread* current = JavaThread::current();
    ResourceMark rm(current);
    const char* name = external_name();

    // Only log classes whose external name contains LogClassLoadingCauseFor;
    // "*" matches every class, null disables the logging entirely.
    if (LogClassLoadingCauseFor == nullptr ||
        (strcmp("*", LogClassLoadingCauseFor) != 0 &&
         strstr(name, LogClassLoadingCauseFor) == nullptr)) {
      return;
    }

    // Log Java stack first
    {
      LogMessage(class, load, cause) msg;
      NonInterleavingLogStream info_stream{LogLevelType::Info, msg};

      info_stream.print_cr("Java stack when loading %s:", name);
      current->print_stack_on(&info_stream);
    }

    // Log native stack second
    if (log_cause_native) {
      // Log to string first so that lines can be indented
      stringStream stack_stream;
      char buf[O_BUFLEN];
      address lastpc = nullptr;
      NativeStackPrinter nsp(current);
      nsp.print_stack(&stack_stream, buf, sizeof(buf), lastpc,
                      true /* print_source_info */, -1 /* max stack */);

      LogMessage(class, load, cause, native) msg;
      NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
      info_stream.print_cr("Native stack when loading %s:", name);

      // Print each native stack line to the log, indented with a tab.
      // The buffer is split in place by replacing '\n' with '\0'.
      int size = (int) stack_stream.size();
      char* stack = stack_stream.as_string();
      char* stack_end = stack + size;
      char* line_start = stack;
      for (char* p = stack; p < stack_end; p++) {
        if (*p == '\n') {
          *p = '\0';
          info_stream.print_cr("\t%s", line_start);
          line_start = p + 1;
        }
      }
      // Emit any trailing text that was not '\n'-terminated.
      if (line_start < stack_end) {
        info_stream.print_cr("\t%s", line_start);
      }
    }
  }
}
4551
4552 // Verification
4553
// Closure applied to every oop field of an object during verification:
// each referenced value must be null or a well-formed oop, otherwise the
// VM is brought down with diagnostic output.
class VerifyFieldClosure: public BasicOopIterateClosure {
 protected:
  template <class T> void do_oop_work(T* p) {
    // RawAccess load: read the field without GC access barriers
    // (presumably to avoid side effects while verifying — the field may be
    // narrow or full-width depending on T).
    oop obj = RawAccess<>::oop_load(p);
    if (!oopDesc::is_oop_or_null(obj)) {
      tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
      Universe::print_on(tty);
      guarantee(false, "boom");
    }
  }
 public:
  virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
  virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
};
4568
4569 void InstanceKlass::verify_on(outputStream* st) {
4570 #ifndef PRODUCT
4571 // Avoid redundant verifies, this really should be in product.
4572 if (_verify_count == Universe::verify_count()) return;
4573 _verify_count = Universe::verify_count();
4574 #endif
4575
4576 // Verify Klass
4577 Klass::verify_on(st);
4578
4579 // Verify that klass is present in ClassLoaderData
4580 guarantee(class_loader_data()->contains_klass(this),
4581 "this class isn't found in class loader data");
4582
4583 // Verify vtables
4584 if (is_linked()) {
4585 // $$$ This used to be done only for m/s collections. Doing it
4586 // always seemed a valid generalization. (DLD -- 6/00)
4587 vtable().verify(st);
4588 }
4589
4590 // Verify first subklass
4591 if (subklass() != nullptr) {
4592 guarantee(subklass()->is_klass(), "should be klass");
4593 }
4594
4595 // Verify siblings
4596 Klass* super = this->super();
4597 Klass* sib = next_sibling();
4598 if (sib != nullptr) {
4599 if (sib == this) {
4600 fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
4601 }
4602
4603 guarantee(sib->is_klass(), "should be klass");
4604 guarantee(sib->super() == super, "siblings should have same superklass");
4605 }
4606
4607 // Verify local interfaces
4608 if (local_interfaces()) {
4609 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
4610 for (int j = 0; j < local_interfaces->length(); j++) {
4611 InstanceKlass* e = local_interfaces->at(j);
4612 guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
4613 }
4614 }
4615
4616 // Verify transitive interfaces
4617 if (transitive_interfaces() != nullptr) {
4618 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
4619 for (int j = 0; j < transitive_interfaces->length(); j++) {
4620 InstanceKlass* e = transitive_interfaces->at(j);
4621 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
4622 }
4623 }
4624
4625 // Verify methods
4626 if (methods() != nullptr) {
4627 Array<Method*>* methods = this->methods();
4628 for (int j = 0; j < methods->length(); j++) {
4629 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4630 }
4631 for (int j = 0; j < methods->length() - 1; j++) {
4632 Method* m1 = methods->at(j);
4633 Method* m2 = methods->at(j + 1);
4634 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4635 }
4636 }
4637
4638 // Verify method ordering
4639 if (method_ordering() != nullptr) {
4640 Array<int>* method_ordering = this->method_ordering();
4641 int length = method_ordering->length();
4642 if (JvmtiExport::can_maintain_original_method_order() ||
4643 ((CDSConfig::is_using_archive() || CDSConfig::is_dumping_archive()) && length != 0)) {
4644 guarantee(length == methods()->length(), "invalid method ordering length");
4645 jlong sum = 0;
4646 for (int j = 0; j < length; j++) {
4647 int original_index = method_ordering->at(j);
4648 guarantee(original_index >= 0, "invalid method ordering index");
4649 guarantee(original_index < length, "invalid method ordering index");
4650 sum += original_index;
4651 }
4652 // Verify sum of indices 0,1,...,length-1
4653 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
4654 } else {
4655 guarantee(length == 0, "invalid method ordering length");
4656 }
4657 }
4658
4659 // Verify default methods
4660 if (default_methods() != nullptr) {
4661 Array<Method*>* methods = this->default_methods();
4662 for (int j = 0; j < methods->length(); j++) {
4663 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4664 }
4665 for (int j = 0; j < methods->length() - 1; j++) {
4666 Method* m1 = methods->at(j);
4667 Method* m2 = methods->at(j + 1);
4668 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4669 }
4670 }
4671
4672 // Verify JNI static field identifiers
4673 if (jni_ids() != nullptr) {
4674 jni_ids()->verify(this);
4675 }
4676
4677 // Verify other fields
4678 if (constants() != nullptr) {
4679 guarantee(constants()->is_constantPool(), "should be constant pool");
4680 }
4681 }
4682
4683 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
4684 Klass::oop_verify_on(obj, st);
4685 VerifyFieldClosure blk;
4686 obj->oop_iterate(&blk);
4687 }
4688
4689 // JNIid class for jfieldIDs only
4690 // Note to reviewers:
4691 // These JNI functions are just moved over to column 1 and not changed
4692 // in the compressed oops workspace.
// Construct a jfieldID node for a field of 'holder' at 'offset', chained
// in front of 'next'. The static-field flag is debug-only bookkeeping.
JNIid::JNIid(InstanceKlass* holder, int offset, JNIid* next) {
  _holder = holder;
  _offset = offset;
  _next = next;
  DEBUG_ONLY(_is_static_field_id = false;)
}
4699
4700 JNIid* JNIid::find(int offset) {
4701 JNIid* current = this;
4702 while (current != nullptr) {
4703 if (current->offset() == offset) return current;
4704 current = current->next();
4705 }
4706 return nullptr;
4707 }
4708
4709 void JNIid::deallocate(JNIid* current) {
4710 while (current != nullptr) {
4711 JNIid* next = current->next();
4712 delete current;
4713 current = next;
4714 }
4715 }
4716
// Verify the jfieldID chain rooted at this node: every node must belong to
// 'holder', and (debug builds only) static field IDs must fall inside the
// static-field block of the holder's mirror.
void JNIid::verify(InstanceKlass* holder) {
  // Static fields live in the java mirror, starting at this fixed offset.
  int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset;
  end_field_offset = first_field_offset + (holder->static_field_size() * wordSize);

  JNIid* current = this;
  while (current != nullptr) {
    guarantee(current->holder() == holder, "Invalid klass in JNIid");
#ifdef ASSERT
    int o = current->offset();
    if (current->is_static_field_id()) {
      guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
    }
#endif
    current = current->next();
  }
}
4734
// Advance the class initialization state. Debug builds check monotonicity:
// classes from the AOT cache may re-enter the same state (<=), others must
// strictly advance (<); a reset to 'allocated' is always permitted.
void InstanceKlass::set_init_state(ClassState state) {
#ifdef ASSERT
  bool good_state = in_aot_cache() ? (_init_state <= state)
                                   : (_init_state < state);
  assert(good_state || state == allocated, "illegal state transition");
#endif
  assert(_init_thread == nullptr, "should be cleared before state change");
  // Release store: writes made before the state change must be visible to
  // threads that observe the new state.
  AtomicAccess::release_store(&_init_state, state);
}
4744
4745 #if INCLUDE_JVMTI
4746
4747 // RedefineClasses() support for previous versions
4748
// Globally, there is at least one previous version of a class to walk
// during class unloading, which is saved because old methods in the class
// are still running. Otherwise the previous version list is cleaned up.
// Set by add_previous_version()/purge_previous_version_list(); read and
// cleared by should_clean_previous_versions_and_reset().
bool InstanceKlass::_should_clean_previous_versions = false;
4753
4754 // Returns true if there are previous versions of a class for class
4755 // unloading only. Also resets the flag to false. purge_previous_version
4756 // will set the flag to true if there are any left, i.e., if there's any
4757 // work to do for next time. This is to avoid the expensive code cache
4758 // walk in CLDG::clean_deallocate_lists().
4759 bool InstanceKlass::should_clean_previous_versions_and_reset() {
4760 bool ret = _should_clean_previous_versions;
4761 log_trace(redefine, class, iklass, purge)("Class unloading: should_clean_previous_versions = %s",
4762 ret ? "true" : "false");
4763 _should_clean_previous_versions = false;
4764 return ret;
4765 }
4766
4767 // This nulls out the jmethodID for all obsolete methods in the previous version of the 'klass'.
4768 // These obsolete methods only exist in the previous version and we're about to delete the memory for them.
4769 // The jmethodID for these are deallocated when we unload the class, so this doesn't remove them from the table.
4770 void InstanceKlass::clear_obsolete_jmethod_ids(InstanceKlass* klass) {
4771 Array<Method*>* method_refs = klass->methods();
4772 for (int k = 0; k < method_refs->length(); k++) {
4773 Method* method = method_refs->at(k);
4774 // Only need to clear obsolete methods.
4775 if (method != nullptr && method->is_obsolete()) {
4776 method->clear_jmethod_id();
4777 }
4778 }
4779 }
4780
4781 // Purge previous versions before adding new previous versions of the class and
4782 // during class unloading.
// Walk this class's previous-version list and unlink/free every version
// whose constant pool is no longer on any stack (i.e. none of its methods
// are still executing). Must run at a safepoint so the list is stable.
void InstanceKlass::purge_previous_version_list() {
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(has_been_redefined(), "Should only be called for main class");

  // Quick exit.
  if (previous_versions() == nullptr) {
    return;
  }

  // This klass has previous versions so see what we can cleanup
  // while it is safe to do so.

  int deleted_count = 0; // leave debugging breadcrumbs
  int live_count = 0;
  ClassLoaderData* loader_data = class_loader_data();
  assert(loader_data != nullptr, "should never be null");

  ResourceMark rm;
  log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());

  // previous versions are linked together through the InstanceKlass.
  // 'last' trails 'pv_node' so dead nodes can be spliced out of the list.
  InstanceKlass* pv_node = previous_versions();
  InstanceKlass* last = this;
  int version = 0;

  // check the previous versions list
  for (; pv_node != nullptr; ) {

    ConstantPool* pvcp = pv_node->constants();
    assert(pvcp != nullptr, "cp ref was unexpectedly cleared");

    if (!pvcp->on_stack()) {
      // If the constant pool isn't on stack, none of the methods
      // are executing. Unlink this previous_version.
      // The previous version InstanceKlass is on the ClassLoaderData deallocate list
      // so will be deallocated during the next phase of class unloading.
      log_trace(redefine, class, iklass, purge)
        ("previous version " PTR_FORMAT " is dead.", p2i(pv_node));
      // Unlink from previous version list.
      assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
      InstanceKlass* next = pv_node->previous_versions();
      clear_obsolete_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class
      pv_node->link_previous_versions(nullptr); // point next to null
      last->link_previous_versions(next);
      // Delete this node directly. Nothing is referring to it and we don't
      // want it to increase the counter for metadata to delete in CLDG.
      MetadataFactory::free_metadata(loader_data, pv_node);
      pv_node = next;
      deleted_count++;
      version++;
      continue;
    } else {
      // Methods of this version may still be executing; keep it linked.
      assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder");
      guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
      live_count++;
      if (pvcp->in_aot_cache()) {
        // Shared previous versions can never be removed so no cleaning is needed.
        log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node));
      } else {
        // Previous version alive, set that clean is needed for next time.
        _should_clean_previous_versions = true;
        log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is alive", p2i(pv_node));
      }
    }

    // next previous version
    last = pv_node;
    pv_node = pv_node->previous_versions();
    version++;
  }
  log_trace(redefine, class, iklass, purge)
    ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
}
4856
// For every method marked obsolete in 'old_methods', mark any matching
// (same name+signature) not-yet-obsolete method in each previous version
// obsolete as well: the current redefinition has superseded those EMCP
// copies. Only needed when old_methods mixes EMCP and obsolete methods.
void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
                                                int emcp_method_count) {
  int obsolete_method_count = old_methods->length() - emcp_method_count;

  if (emcp_method_count != 0 && obsolete_method_count != 0 &&
      _previous_versions != nullptr) {
    // We have a mix of obsolete and EMCP methods so we have to
    // clear out any matching EMCP method entries the hard way.
    int local_count = 0;
    for (int i = 0; i < old_methods->length(); i++) {
      Method* old_method = old_methods->at(i);
      if (old_method->is_obsolete()) {
        // only obsolete methods are interesting
        Symbol* m_name = old_method->name();
        Symbol* m_signature = old_method->signature();

        // previous versions are linked together through the InstanceKlass
        int j = 0;
        for (InstanceKlass* prev_version = _previous_versions;
             prev_version != nullptr;
             prev_version = prev_version->previous_versions(), j++) {

          Array<Method*>* method_refs = prev_version->methods();
          for (int k = 0; k < method_refs->length(); k++) {
            Method* method = method_refs->at(k);

            // At most one non-obsolete match can exist per version, so stop
            // scanning this version once it is found and marked.
            if (!method->is_obsolete() &&
                method->name() == m_name &&
                method->signature() == m_signature) {
              // The current RedefineClasses() call has made all EMCP
              // versions of this method obsolete so mark it as obsolete
              log_trace(redefine, class, iklass, add)
                ("%s(%s): flush obsolete method @%d in version @%d",
                 m_name->as_C_string(), m_signature->as_C_string(), k, j);

              method->set_is_obsolete();
              break;
            }
          }

          // The previous loop may not find a matching EMCP method, but
          // that doesn't mean that we can optimize and not go any
          // further back in the PreviousVersion generations. The EMCP
          // method for this generation could have already been made obsolete,
          // but there still may be an older EMCP method that has not
          // been made obsolete.
        }

        if (++local_count >= obsolete_method_count) {
          // no more obsolete methods so bail out now
          break;
        }
      }
    }
  }
}
4913
4914 // Save the scratch_class as the previous version if any of the methods are running.
4915 // The previous_versions are used to set breakpoints in EMCP methods and they are
4916 // also used to clean MethodData links to redefined methods that are no longer running.
// Save the scratch_class as the previous version if any of the methods are running.
// The previous_versions are used to set breakpoints in EMCP methods and they are
// also used to clean MethodData links to redefined methods that are no longer running.
void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
                                         int emcp_method_count) {
  assert(Thread::current()->is_VM_thread(),
         "only VMThread can add previous versions");

  ResourceMark rm;
  log_trace(redefine, class, iklass, add)
    ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);

  // Clean out old previous versions for this class
  purge_previous_version_list();

  // Mark newly obsolete methods in remaining previous versions. An EMCP method from
  // a previous redefinition may be made obsolete by this redefinition.
  Array<Method*>* old_methods = scratch_class->methods();
  mark_newly_obsolete_methods(old_methods, emcp_method_count);

  // If the constant pool for this previous version of the class
  // is not marked as being on the stack, then none of the methods
  // in this previous version of the class are on the stack so
  // we don't need to add this as a previous version.
  ConstantPool* cp_ref = scratch_class->constants();
  if (!cp_ref->on_stack()) {
    log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
    // Hand the scratch class to the CLD for deferred deallocation.
    scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
    return;
  }

  // Add previous version if any methods are still running or if this is
  // a shared class which should never be removed.
  assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version");
  // Splice the scratch class onto the head of the previous-version list.
  scratch_class->link_previous_versions(previous_versions());
  link_previous_versions(scratch_class);
  if (cp_ref->in_aot_cache()) {
    log_trace(redefine, class, iklass, add) ("scratch class added; class is shared");
  } else {
    // We only set clean_previous_versions flag for processing during class
    // unloading for non-shared classes.
    _should_clean_previous_versions = true;
    log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
  }
} // end add_previous_version()
4959
4960 #endif // INCLUDE_JVMTI
4961
4962 Method* InstanceKlass::method_with_idnum(int idnum) const {
4963 Method* m = nullptr;
4964 if (idnum < methods()->length()) {
4965 m = methods()->at(idnum);
4966 }
4967 if (m == nullptr || m->method_idnum() != idnum) {
4968 for (int index = 0; index < methods()->length(); ++index) {
4969 m = methods()->at(index);
4970 if (m->method_idnum() == idnum) {
4971 return m;
4972 }
4973 }
4974 // None found, return null for the caller to handle.
4975 return nullptr;
4976 }
4977 return m;
4978 }
4979
4980
4981 Method* InstanceKlass::method_with_orig_idnum(int idnum) const {
4982 if (idnum >= methods()->length()) {
4983 return nullptr;
4984 }
4985 Method* m = methods()->at(idnum);
4986 if (m != nullptr && m->orig_method_idnum() == idnum) {
4987 return m;
4988 }
4989 // Obsolete method idnum does not match the original idnum
4990 for (int index = 0; index < methods()->length(); ++index) {
4991 m = methods()->at(index);
4992 if (m->orig_method_idnum() == idnum) {
4993 return m;
4994 }
4995 }
4996 // None found, return null for the caller to handle.
4997 return nullptr;
4998 }
4999
5000
5001 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) const {
5002 const InstanceKlass* holder = get_klass_version(version);
5003 if (holder == nullptr) {
5004 return nullptr; // The version of klass is gone, no method is found
5005 }
5006 return holder->method_with_orig_idnum(idnum);
5007 }
5008
5009 #if INCLUDE_JVMTI
// Accessor for the cached class file data kept for JVMTI; may be null.
JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
  return _cached_class_file;
}
5013
// Length in bytes of the cached class file; delegates to VM_RedefineClasses.
jint InstanceKlass::get_cached_class_file_len() {
  return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
}
5017
// Raw bytes of the cached class file; delegates to VM_RedefineClasses.
unsigned char * InstanceKlass::get_cached_class_file_bytes() {
  return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
}
5021 #endif
5022
5023 // Make a step iterating over the class hierarchy under the root class.
5024 // Skips subclasses if requested.
void ClassHierarchyIterator::next() {
  assert(_current != nullptr, "required");
  // Depth-first: descend into the first subclass, unless subtree visiting
  // was suppressed for this node (_visit_subclasses cleared).
  if (_visit_subclasses && _current->subklass() != nullptr) {
    _current = _current->subklass();
    return; // visit next subclass
  }
  _visit_subclasses = true; // reset
  // No (or skipped) subclasses: move sideways, backtracking up the super
  // chain while the current node has no sibling left.
  while (_current->next_sibling() == nullptr && _current != _root) {
    _current = _current->java_super(); // backtrack; no more sibling subclasses left
  }
  if (_current == _root) {
    // Iteration is over (back at root after backtracking). Invalidate the iterator.
    _current = nullptr;
    return;
  }
  _current = _current->next_sibling();
  return; // visit next sibling subclass
}