1 /*
2 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotClassInitializer.hpp"
26 #include "cds/aotMetaspace.hpp"
27 #include "cds/archiveUtils.hpp"
28 #include "cds/cdsConfig.hpp"
29 #include "cds/cdsEnumKlass.hpp"
30 #include "cds/classListWriter.hpp"
31 #include "cds/heapShared.hpp"
32 #include "classfile/classFileParser.hpp"
33 #include "classfile/classFileStream.hpp"
34 #include "classfile/classLoader.hpp"
35 #include "classfile/classLoaderData.inline.hpp"
36 #include "classfile/javaClasses.hpp"
37 #include "classfile/moduleEntry.hpp"
38 #include "classfile/systemDictionary.hpp"
39 #include "classfile/systemDictionaryShared.hpp"
40 #include "classfile/verifier.hpp"
41 #include "classfile/vmClasses.hpp"
42 #include "classfile/vmSymbols.hpp"
43 #include "code/codeCache.hpp"
44 #include "code/dependencyContext.hpp"
45 #include "compiler/compilationPolicy.hpp"
46 #include "compiler/compileBroker.hpp"
47 #include "gc/shared/collectedHeap.inline.hpp"
48 #include "interpreter/bytecodeStream.hpp"
49 #include "interpreter/oopMapCache.hpp"
50 #include "interpreter/rewriter.hpp"
51 #include "jvm.h"
52 #include "jvmtifiles/jvmti.h"
53 #include "klass.inline.hpp"
54 #include "logging/log.hpp"
55 #include "logging/logMessage.hpp"
56 #include "logging/logStream.hpp"
57 #include "memory/allocation.inline.hpp"
58 #include "memory/iterator.inline.hpp"
59 #include "memory/metadataFactory.hpp"
60 #include "memory/metaspaceClosure.hpp"
61 #include "memory/oopFactory.hpp"
62 #include "memory/resourceArea.hpp"
63 #include "memory/universe.hpp"
64 #include "oops/constantPool.hpp"
65 #include "oops/fieldStreams.inline.hpp"
66 #include "oops/inlineKlass.hpp"
67 #include "oops/instanceClassLoaderKlass.hpp"
68 #include "oops/instanceKlass.inline.hpp"
69 #include "oops/instanceMirrorKlass.hpp"
70 #include "oops/instanceOop.hpp"
71 #include "oops/instanceStackChunkKlass.hpp"
72 #include "oops/klass.inline.hpp"
73 #include "oops/markWord.hpp"
74 #include "oops/method.hpp"
75 #include "oops/oop.inline.hpp"
76 #include "oops/recordComponent.hpp"
77 #include "oops/refArrayKlass.hpp"
78 #include "oops/symbol.hpp"
79 #include "prims/jvmtiExport.hpp"
80 #include "prims/jvmtiRedefineClasses.hpp"
81 #include "prims/jvmtiThreadState.hpp"
82 #include "prims/methodComparator.hpp"
83 #include "runtime/arguments.hpp"
84 #include "runtime/atomicAccess.hpp"
85 #include "runtime/deoptimization.hpp"
86 #include "runtime/fieldDescriptor.inline.hpp"
87 #include "runtime/handles.inline.hpp"
88 #include "runtime/javaCalls.hpp"
89 #include "runtime/javaThread.inline.hpp"
90 #include "runtime/mutexLocker.hpp"
91 #include "runtime/orderAccess.hpp"
92 #include "runtime/os.inline.hpp"
93 #include "runtime/reflection.hpp"
94 #include "runtime/synchronizer.hpp"
95 #include "runtime/threads.hpp"
96 #include "services/classLoadingService.hpp"
97 #include "services/finalizerService.hpp"
98 #include "services/threadService.hpp"
99 #include "utilities/dtrace.hpp"
100 #include "utilities/events.hpp"
101 #include "utilities/macros.hpp"
102 #include "utilities/nativeStackPrinter.hpp"
103 #include "utilities/stringUtils.hpp"
104 #ifdef COMPILER1
105 #include "c1/c1_Compiler.hpp"
106 #endif
107 #if INCLUDE_JFR
108 #include "jfr/jfrEvents.hpp"
109 #endif
110
111 #ifdef DTRACE_ENABLED
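// These lower-case aliases map the probe names used by the DTRACE_CLASSINIT_PROBE
// macros below onto the generated upper-case DTrace probe macros.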
112
113
114 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
115 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
116 #define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
117 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
118 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
119 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
120 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
121 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
122 #define DTRACE_CLASSINIT_PROBE(type, thread_type) \
123 { \
124 char* data = nullptr; \
125 int len = 0; \
126 Symbol* clss_name = name(); \
127 if (clss_name != nullptr) { \
128 data = (char*)clss_name->bytes(); \
129 len = clss_name->utf8_length(); \
130 } \
131 HOTSPOT_CLASS_INITIALIZATION_##type( \
132 data, len, (void*)class_loader(), thread_type); \
133 }
134
135 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \
136 { \
137 char* data = nullptr; \
138 int len = 0; \
139 Symbol* clss_name = name(); \
140 if (clss_name != nullptr) { \
141 data = (char*)clss_name->bytes(); \
142 len = clss_name->utf8_length(); \
143 } \
144 HOTSPOT_CLASS_INITIALIZATION_##type( \
145 data, len, (void*)class_loader(), thread_type, wait); \
146 }
147
148 #else // ndef DTRACE_ENABLED
149
150 #define DTRACE_CLASSINIT_PROBE(type, thread_type)
151 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
152
153 #endif // ndef DTRACE_ENABLED
154
155 void InlineLayoutInfo::metaspace_pointers_do(MetaspaceClosure* it) {
156 log_trace(cds)("Iter(InlineLayoutInfo): %p", this);
157 it->push(&_klass);
158 }
159
160 bool InstanceKlass::_finalization_enabled = true;
161
162 static inline bool is_class_loader(const Symbol* class_name,
163 const ClassFileParser& parser) {
164 assert(class_name != nullptr, "invariant");
165
166 if (class_name == vmSymbols::java_lang_ClassLoader()) {
167 return true;
168 }
169
170 if (vmClasses::ClassLoader_klass_is_loaded()) {
171 const Klass* const super_klass = parser.super_klass();
172 if (super_klass != nullptr) {
173 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
174 return true;
175 }
176 }
177 }
178 return false;
179 }
180
181 bool InstanceKlass::field_is_null_free_inline_type(int index) const {
182 return field(index).field_flags().is_null_free_inline_type();
183 }
184
185 bool InstanceKlass::is_class_in_loadable_descriptors_attribute(Symbol* name) const {
186 if (_loadable_descriptors == nullptr) return false;
187 for (int i = 0; i < _loadable_descriptors->length(); i++) {
188 Symbol* class_name = _constants->symbol_at(_loadable_descriptors->at(i));
189 if (class_name == name) return true;
190 }
191 return false;
192 }
193
194 static inline bool is_stack_chunk_class(const Symbol* class_name,
195 const ClassLoaderData* loader_data) {
196 return (class_name == vmSymbols::jdk_internal_vm_StackChunk() &&
197 loader_data->is_the_null_class_loader_data());
198 }
199
200 // private: called to verify that k is a static member of this nest.
201 // We know that k is an instance class in the same package and hence the
202 // same classloader.
203 bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
204 assert(!is_hidden(), "unexpected hidden class");
205 if (_nest_members == nullptr || _nest_members == Universe::the_empty_short_array()) {
206 if (log_is_enabled(Trace, class, nestmates)) {
207 ResourceMark rm(current);
208 log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
209 k->external_name(), this->external_name());
210 }
211 return false;
212 }
213
214 if (log_is_enabled(Trace, class, nestmates)) {
215 ResourceMark rm(current);
216 log_trace(class, nestmates)("Checking nest membership of %s in %s",
217 k->external_name(), this->external_name());
218 }
219
220 // Check for the named class in _nest_members.
221 // We don't resolve, or load, any classes.
222 for (int i = 0; i < _nest_members->length(); i++) {
223 int cp_index = _nest_members->at(i);
224 Symbol* name = _constants->klass_name_at(cp_index);
225 if (name == k->name()) {
226 log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
227 return true;
228 }
229 }
230 log_trace(class, nestmates)("- class is NOT a nest member!");
231 return false;
232 }
233
234 // Called to verify that k is a permitted subclass of this class.
235 // The incoming stringStream is used to format the messages for error logging and for the caller
236 // to use for exception throwing.
237 bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k, stringStream& ss) const {
238 Thread* current = Thread::current();
239 assert(k != nullptr, "sanity check");
240 assert(_permitted_subclasses != nullptr && _permitted_subclasses != Universe::the_empty_short_array(),
241 "unexpected empty _permitted_subclasses array");
242
243 if (log_is_enabled(Trace, class, sealed)) {
244 ResourceMark rm(current);
245 log_trace(class, sealed)("Checking for permitted subclass %s in %s",
246 k->external_name(), this->external_name());
247 }
248
249 // Check that the class and its super are in the same module.
250 if (k->module() != this->module()) {
251 ss.print("Failed same module check: subclass %s is in module '%s' with loader %s, "
252 "and sealed class %s is in module '%s' with loader %s",
253 k->external_name(),
254 k->module()->name_as_C_string(),
255 k->module()->loader_data()->loader_name_and_id(),
256 this->external_name(),
257 this->module()->name_as_C_string(),
258 this->module()->loader_data()->loader_name_and_id());
259 log_trace(class, sealed)(" - %s", ss.as_string());
260 return false;
261 }
262
263 if (!k->is_public() && !is_same_class_package(k)) {
264 ss.print("Failed same package check: non-public subclass %s is in package '%s' with classloader %s, "
265 "and sealed class %s is in package '%s' with classloader %s",
266 k->external_name(),
267 k->package() != nullptr ? k->package()->name()->as_C_string() : "unnamed",
268 k->module()->loader_data()->loader_name_and_id(),
269 this->external_name(),
270 this->package() != nullptr ? this->package()->name()->as_C_string() : "unnamed",
271 this->module()->loader_data()->loader_name_and_id());
272 log_trace(class, sealed)(" - %s", ss.as_string());
273 return false;
274 }
275
276 for (int i = 0; i < _permitted_subclasses->length(); i++) {
277 int cp_index = _permitted_subclasses->at(i);
278 Symbol* name = _constants->klass_name_at(cp_index);
279 if (name == k->name()) {
280 log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
281 return true;
282 }
283 }
284
285 ss.print("Failed listed permitted subclass check: class %s is not a permitted subclass of %s",
286 k->external_name(), this->external_name());
287 log_trace(class, sealed)(" - %s", ss.as_string());
288 return false;
289 }
290
291 // Return nest-host class, resolving, validating and saving it if needed.
292 // In cases where this is called from a thread that cannot do classloading
293 // (such as a native JIT thread) then we simply return null, which in turn
294 // causes the access check to return false. Such code will retry the access
295 // from a more suitable environment later. Otherwise the _nest_host is always
296 // set once this method returns.
297 // Any errors from nest-host resolution must be preserved so they can be queried
298 // from higher-level access checking code, and reported as part of access checking
299 // exceptions.
300 // VirtualMachineErrors are propagated with a null return.
301 // Under any conditions where the _nest_host can be set to non-null the resulting
302 // value of it and, if applicable, the nest host resolution/validation error,
303 // are idempotent.
304 InstanceKlass* InstanceKlass::nest_host(TRAPS) {
305 InstanceKlass* nest_host_k = _nest_host;
306 if (nest_host_k != nullptr) {
307 return nest_host_k;
308 }
309
310 ResourceMark rm(THREAD);
311
312 // need to resolve and save our nest-host class.
313 if (_nest_host_index != 0) { // we have a real nest_host
314 // Before trying to resolve check if we're in a suitable context
315 bool can_resolve = THREAD->can_call_java();
316 if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
317 log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
318 this->external_name());
319 return nullptr; // sentinel to say "try again from a different context"
320 }
321
322 log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
323 this->external_name(),
324 _constants->klass_name_at(_nest_host_index)->as_C_string());
325
326 Klass* k = _constants->klass_at(_nest_host_index, THREAD);
327 if (HAS_PENDING_EXCEPTION) {
328 if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
329 return nullptr; // propagate VMEs
330 }
331 stringStream ss;
332 char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
333 ss.print("Nest host resolution of %s with host %s failed: ",
334 this->external_name(), target_host_class);
335 java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
336 const char* msg = ss.as_string(true /* on C-heap */);
337 constantPoolHandle cph(THREAD, constants());
338 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
339 CLEAR_PENDING_EXCEPTION;
340
341 log_trace(class, nestmates)("%s", msg);
342 } else {
343 // A valid nest-host is an instance class in the current package that lists this
344 // class as a nest member. If any of these conditions are not met the class is
345 // its own nest-host.
346 const char* error = nullptr;
347
348 // JVMS 5.4.4 indicates package check comes first
349 if (is_same_class_package(k)) {
350 // Now check actual membership. We can't be a member if our "host" is
351 // not an instance class.
352 if (k->is_instance_klass()) {
353 nest_host_k = InstanceKlass::cast(k);
354 bool is_member = nest_host_k->has_nest_member(THREAD, this);
355 if (is_member) {
356 _nest_host = nest_host_k; // save resolved nest-host value
357
358 log_trace(class, nestmates)("Resolved nest-host of %s to %s",
359 this->external_name(), k->external_name());
360 return nest_host_k;
361 } else {
362 error = "current type is not listed as a nest member";
363 }
364 } else {
365 error = "host is not an instance class";
366 }
367 } else {
368 error = "types are in different packages";
369 }
370
371 // something went wrong, so record what and log it
372 {
373 stringStream ss;
374 ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
375 this->external_name(),
376 this->class_loader_data()->loader_name_and_id(),
377 k->external_name(),
378 k->class_loader_data()->loader_name_and_id(),
379 error);
380 const char* msg = ss.as_string(true /* on C-heap */);
381 constantPoolHandle cph(THREAD, constants());
382 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
383 log_trace(class, nestmates)("%s", msg);
384 }
385 }
386 } else {
387 log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
388 this->external_name());
389 }
390
391 // Either not in an explicit nest, or else an error occurred, so
392 // the nest-host is set to `this`. Any thread that sees this assignment
393 // will also see any setting of nest_host_error(), if applicable.
394 return (_nest_host = this);
395 }
396
397 // Dynamic nest member support: set this class's nest host to the given class.
398 // This occurs as part of the class definition, as soon as the instanceKlass
399 // has been created and doesn't require further resolution. The code:
400 // lookup().defineHiddenClass(bytes_for_X, NESTMATE);
401 // results in:
402 // class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
403 // If it has an explicit _nest_host_index or _nest_members, these will be ignored.
404 // We also know the "host" is a valid nest-host in the same package so we can
405 // assert some of those facts.
406 void InstanceKlass::set_nest_host(InstanceKlass* host) {
407 assert(is_hidden(), "must be a hidden class");
408 assert(host != nullptr, "null nest host specified");
409 assert(_nest_host == nullptr, "current class has resolved nest-host");
410 assert(nest_host_error() == nullptr, "unexpected nest host resolution error exists: %s",
411 nest_host_error());
412 assert((host->_nest_host == nullptr && host->_nest_host_index == 0) ||
413 (host->_nest_host == host), "proposed host is not a valid nest-host");
414 // Can't assert this as package is not set yet:
415 // assert(is_same_class_package(host), "proposed host is in wrong package");
416
417 if (log_is_enabled(Trace, class, nestmates)) {
418 ResourceMark rm;
419 const char* msg = "";
420 // a hidden class does not expect a statically defined nest-host
421 if (_nest_host_index > 0) {
422 msg = "(the NestHost attribute in the current class is ignored)";
423 } else if (_nest_members != nullptr && _nest_members != Universe::the_empty_short_array()) {
424 msg = "(the NestMembers attribute in the current class is ignored)";
425 }
426 log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
427 this->external_name(),
428 host->external_name(),
429 msg);
430 }
431 // set dynamic nest host
432 _nest_host = host;
433 // Record dependency to keep nest host from being unloaded before this class.
434 ClassLoaderData* this_key = class_loader_data();
435 assert(this_key != nullptr, "sanity");
436 this_key->record_dependency(host);
437 }
438
439 // check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
440 // or we are k's nest_host - all of which is covered by comparing the two
441 // resolved_nest_hosts.
442 // Any exceptions (i.e. VMEs) are propagated.
443 bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {
444
445 assert(this != k, "this should be handled by higher-level code");
446
447 // Per JVMS 5.4.4 we first resolve and validate the current class, then
448 // the target class k.
449
450 InstanceKlass* cur_host = nest_host(CHECK_false);
451 if (cur_host == nullptr) {
452 return false;
453 }
454
455 Klass* k_nest_host = k->nest_host(CHECK_false);
456 if (k_nest_host == nullptr) {
457 return false;
458 }
459
460 bool access = (cur_host == k_nest_host);
461
462 ResourceMark rm(THREAD);
463 log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
464 this->external_name(),
465 access ? "" : "NOT ",
466 k->external_name());
467 return access;
468 }
469
470 const char* InstanceKlass::nest_host_error() {
471 if (_nest_host_index == 0) {
472 return nullptr;
473 } else {
474 constantPoolHandle cph(Thread::current(), constants());
475 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
476 }
477 }
478
479 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
480 const int size = InstanceKlass::size(parser.vtable_size(),
481 parser.itable_size(),
482 nonstatic_oop_map_size(parser.total_oop_map_count()),
483 parser.is_interface(),
484 parser.is_inline_type());
485
486 const Symbol* const class_name = parser.class_name();
487 assert(class_name != nullptr, "invariant");
488 ClassLoaderData* loader_data = parser.loader_data();
489 assert(loader_data != nullptr, "invariant");
490
491 InstanceKlass* ik;
492
493 // Allocation
494 if (parser.is_instance_ref_klass()) {
495 // java.lang.ref.Reference
496 ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
497 } else if (class_name == vmSymbols::java_lang_Class()) {
498 // mirror - java.lang.Class
499 ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
500 } else if (is_stack_chunk_class(class_name, loader_data)) {
501 // stack chunk
502 ik = new (loader_data, size, THREAD) InstanceStackChunkKlass(parser);
503 } else if (is_class_loader(class_name, parser)) {
504 // class loader - java.lang.ClassLoader
505 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
506 } else if (parser.is_inline_type()) {
507 // inline type
508 ik = new (loader_data, size, THREAD) InlineKlass(parser);
509 } else {
510 // normal
511 ik = new (loader_data, size, THREAD) InstanceKlass(parser);
512 }
513
514 if (ik != nullptr && UseCompressedClassPointers) {
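// Every Klass must be allocated at an address that the narrow Klass ID encoding
// can represent, otherwise compressed class pointers cannot refer to it.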
515 assert(CompressedKlassPointers::is_encodable(ik),
516 "Klass " PTR_FORMAT "needs a narrow Klass ID, but is not encodable", p2i(ik));
517 }
518
519 // Check for pending exception before adding to the loader data and incrementing
520 // class count. Can get OOM here.
521 if (HAS_PENDING_EXCEPTION) {
522 return nullptr;
523 }
524
525 #ifdef ASSERT
526 ik->bounds_check((address) ik->start_of_vtable(), false, size);
527 ik->bounds_check((address) ik->start_of_itable(), false, size);
528 ik->bounds_check((address) ik->end_of_itable(), true, size);
529 ik->bounds_check((address) ik->end_of_nonstatic_oop_maps(), true, size);
530 #endif //ASSERT
531 return ik;
532 }
533
534 #ifndef PRODUCT
535 bool InstanceKlass::bounds_check(address addr, bool edge_ok, intptr_t size_in_bytes) const {
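// Debug-only sanity check: addr must lie within this Klass's allocation
// [this, this + size), or exactly on a boundary when edge_ok is true.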
536 const char* bad = nullptr;
537 address end = nullptr;
538 if (addr < (address)this) {
539 bad = "before";
540 } else if (addr == (address)this) {
541 if (edge_ok) return true;
542 bad = "just before";
543 } else if (addr == (end = (address)this + sizeof(intptr_t) * (size_in_bytes < 0 ? size() : size_in_bytes))) {
544 if (edge_ok) return true;
545 bad = "just after";
546 } else if (addr > end) {
547 bad = "after";
548 } else {
549 return true;
550 }
551 tty->print_cr("%s object bounds: " INTPTR_FORMAT " [" INTPTR_FORMAT ".." INTPTR_FORMAT "]",
552 bad, (intptr_t)addr, (intptr_t)this, (intptr_t)end);
553 Verbose = WizardMode = true; this->print(); //@@
554 return false;
555 }
556 #endif //PRODUCT
557
558 // copy method ordering from resource area to Metaspace
559 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
560 if (m != nullptr) {
561 // allocate a new array and copy contents (memcpy?)
562 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
563 for (int i = 0; i < m->length(); i++) {
564 _method_ordering->at_put(i, m->at(i));
565 }
566 } else {
567 _method_ordering = Universe::the_empty_int_array();
568 }
569 }
570
571 // create a new array of vtable_indices for default methods
572 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
573 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
574 assert(default_vtable_indices() == nullptr, "only create once");
575 set_default_vtable_indices(vtable_indices);
576 return vtable_indices;
577 }
578
579
580 InstanceKlass::InstanceKlass() {
581 assert(CDSConfig::is_dumping_static_archive() || CDSConfig::is_using_archive(), "only for CDS");
582 }
583
584 InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, markWord prototype_header, ReferenceType reference_type) :
585 Klass(kind, prototype_header),
586 _nest_members(nullptr),
587 _nest_host(nullptr),
588 _permitted_subclasses(nullptr),
589 _record_components(nullptr),
590 _static_field_size(parser.static_field_size()),
591 _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
592 _itable_len(parser.itable_size()),
593 _nest_host_index(0),
594 _init_state(allocated),
595 _reference_type(reference_type),
596 _init_thread(nullptr),
597 _inline_layout_info_array(nullptr),
598 _loadable_descriptors(nullptr),
599 _adr_inlineklass_fixed_block(nullptr)
600 {
601 set_vtable_length(parser.vtable_size());
602 set_access_flags(parser.access_flags());
603 if (parser.is_hidden()) set_is_hidden();
604 set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
605 false));
606 if (parser.has_inline_fields()) {
607 set_has_inline_type_fields();
608 }
609
610 assert(nullptr == _methods, "underlying memory not zeroed?");
611 assert(is_instance_klass(), "is layout incorrect?");
612 assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
613 }
614
615 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
616 Array<Method*>* methods) {
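// Method arrays that live in the AOT/CDS archive (or the shared empty array) are
// not owned by this loader's metaspace and must not be freed here.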
617 if (methods != nullptr && methods != Universe::the_empty_method_array() &&
618 !methods->in_aot_cache()) {
619 for (int i = 0; i < methods->length(); i++) {
620 Method* method = methods->at(i);
621 if (method == nullptr) continue; // maybe null if error processing
622 // Only want to delete methods that are not executing for RedefineClasses.
623 // The previous version will point to them so they're not totally dangling
624 assert (!method->on_stack(), "shouldn't be called with methods on stack");
625 MetadataFactory::free_metadata(loader_data, method);
626 }
627 MetadataFactory::free_array<Method*>(loader_data, methods);
628 }
629 }
630
631 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
632 const InstanceKlass* super_klass,
633 Array<InstanceKlass*>* local_interfaces,
634 Array<InstanceKlass*>* transitive_interfaces) {
635 // Only deallocate the transitive interfaces if they are not empty, not shared with
636 // the super class, and not the same array as the local interfaces. See code in parseClassFile.
637 Array<InstanceKlass*>* ti = transitive_interfaces;
638 if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
639 // check that the interfaces don't come from super class
640 Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
641 super_klass->transitive_interfaces();
642 if (ti != sti && ti != nullptr && !ti->in_aot_cache()) {
643 MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
644 }
645 }
646
647 // local interfaces can be empty
648 if (local_interfaces != Universe::the_empty_instance_klass_array() &&
649 local_interfaces != nullptr && !local_interfaces->in_aot_cache()) {
650 MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
651 }
652 }
653
654 void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
655 Array<RecordComponent*>* record_components) {
656 if (record_components != nullptr && !record_components->in_aot_cache()) {
657 for (int i = 0; i < record_components->length(); i++) {
658 RecordComponent* record_component = record_components->at(i);
659 MetadataFactory::free_metadata(loader_data, record_component);
660 }
661 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
662 }
663 }
664
665 // This function deallocates the metadata and C heap pointers that the
666 // InstanceKlass points to.
667 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
668 // Orphan the mirror first, CMS thinks it's still live.
669 if (java_mirror() != nullptr) {
670 java_lang_Class::set_klass(java_mirror(), nullptr);
671 }
672
673 // Also remove mirror from handles
674 loader_data->remove_handle(_java_mirror);
675
676 // Need to take this class off the class loader data list.
677 loader_data->remove_class(this);
678
679 // The array_klass for this class is created later, after error handling.
680 // For class redefinition, we keep the original class so this scratch class
681 // doesn't have an array class. Either way, assert that there is nothing
682 // to deallocate.
683 assert(array_klasses() == nullptr, "array classes shouldn't be created for this class yet");
684
685 // Release C heap allocated data that this points to, which includes
686 // reference counting symbol names.
687 // Can't release the constant pool or MethodData C heap data here because the constant
688 // pool can be deallocated separately from the InstanceKlass for default methods and
689 // redefine classes. MethodData can also be released separately.
690 release_C_heap_structures(/* release_sub_metadata */ false);
691
692 deallocate_methods(loader_data, methods());
693 set_methods(nullptr);
694
695 deallocate_record_components(loader_data, record_components());
696 set_record_components(nullptr);
697
698 if (method_ordering() != nullptr &&
699 method_ordering() != Universe::the_empty_int_array() &&
700 !method_ordering()->in_aot_cache()) {
701 MetadataFactory::free_array<int>(loader_data, method_ordering());
702 }
703 set_method_ordering(nullptr);
704
705 // default methods can be empty
706 if (default_methods() != nullptr &&
707 default_methods() != Universe::the_empty_method_array() &&
708 !default_methods()->in_aot_cache()) {
709 MetadataFactory::free_array<Method*>(loader_data, default_methods());
710 }
711 // Do NOT deallocate the default methods, they are owned by superinterfaces.
712 set_default_methods(nullptr);
713
714 // default methods vtable indices can be empty
715 if (default_vtable_indices() != nullptr &&
716 !default_vtable_indices()->in_aot_cache()) {
717 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
718 }
719 set_default_vtable_indices(nullptr);
720
721
722 // This array is in Klass, but remove it with the InstanceKlass since
723 // this place would be the only caller and it can share memory with transitive
724 // interfaces.
725 if (secondary_supers() != nullptr &&
726 secondary_supers() != Universe::the_empty_klass_array() &&
727 // see comments in compute_secondary_supers about the following cast
728 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
729 !secondary_supers()->in_aot_cache()) {
730 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
731 }
732 set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);
733
734 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
735 set_transitive_interfaces(nullptr);
736 set_local_interfaces(nullptr);
737
738 if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->in_aot_cache()) {
739 MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
740 }
741 set_fieldinfo_stream(nullptr);
742
743 if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->in_aot_cache()) {
744 MetadataFactory::free_array<u1>(loader_data, fieldinfo_search_table());
745 }
746 set_fieldinfo_search_table(nullptr);
747
748 if (fields_status() != nullptr && !fields_status()->in_aot_cache()) {
749 MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
750 }
751 set_fields_status(nullptr);
752
753 if (inline_layout_info_array() != nullptr) {
754 MetadataFactory::free_array<InlineLayoutInfo>(loader_data, inline_layout_info_array());
755 }
756 set_inline_layout_info_array(nullptr);
757
758 // If a method from a redefined class is using this constant pool, don't
759 // delete it, yet. The new class's previous version will point to this.
760 if (constants() != nullptr) {
761 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
762 if (!constants()->in_aot_cache()) {
763 MetadataFactory::free_metadata(loader_data, constants());
764 }
765 // Delete any cached resolution errors for the constant pool
766 SystemDictionary::delete_resolution_error(constants());
767
768 set_constants(nullptr);
769 }
770
771 if (inner_classes() != nullptr &&
772 inner_classes() != Universe::the_empty_short_array() &&
773 !inner_classes()->in_aot_cache()) {
774 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
775 }
776 set_inner_classes(nullptr);
777
778 if (nest_members() != nullptr &&
779 nest_members() != Universe::the_empty_short_array() &&
780 !nest_members()->in_aot_cache()) {
781 MetadataFactory::free_array<jushort>(loader_data, nest_members());
782 }
783 set_nest_members(nullptr);
784
785 if (permitted_subclasses() != nullptr &&
786 permitted_subclasses() != Universe::the_empty_short_array() &&
787 !permitted_subclasses()->in_aot_cache()) {
788 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
789 }
790 set_permitted_subclasses(nullptr);
791
792 if (loadable_descriptors() != nullptr &&
793 loadable_descriptors() != Universe::the_empty_short_array() &&
794 !loadable_descriptors()->in_aot_cache()) {
795 MetadataFactory::free_array<jushort>(loader_data, loadable_descriptors());
796 }
797 set_loadable_descriptors(nullptr);
798
799 // We should deallocate the Annotations instance if it's not in shared spaces.
800 if (annotations() != nullptr && !annotations()->in_aot_cache()) {
801 MetadataFactory::free_metadata(loader_data, annotations());
802 }
803 set_annotations(nullptr);
804
805 SystemDictionaryShared::handle_class_unloading(this);
806
807 #if INCLUDE_CDS_JAVA_HEAP
808 if (CDSConfig::is_dumping_heap()) {
809 HeapShared::remove_scratch_objects(this);
810 }
811 #endif
812 }
813
814 bool InstanceKlass::is_record() const {
815 return _record_components != nullptr &&
816 is_final() &&
817 super() == vmClasses::Record_klass();
818 }
819
820 bool InstanceKlass::is_sealed() const {
821 return _permitted_subclasses != nullptr &&
822 _permitted_subclasses != Universe::the_empty_short_array();
823 }
824
825 // JLS 8.9: An enum class is either implicitly final and derives
826 // from java.lang.Enum, or else is implicitly sealed to its
827 // anonymous subclasses. This query detects both kinds.
828 // It does not validate the finality or
829 // sealing conditions: it merely checks for a super of Enum.
830 // This is sufficient for recognizing well-formed enums.
831 bool InstanceKlass::is_enum_subclass() const {
832 InstanceKlass* s = super();
833 return (s == vmClasses::Enum_klass() ||
834 (s != nullptr && s->super() == vmClasses::Enum_klass()));
835 }
836
837 bool InstanceKlass::should_be_initialized() const {
838 return !is_initialized();
839 }
840
841 klassItable InstanceKlass::itable() const {
842 return klassItable(const_cast<InstanceKlass*>(this));
843 }
844
845 // JVMTI spec thinks there are signers and protection domain in the
846 // instanceKlass. These accessors pretend these fields are there.
847 // The hprof specification also thinks these fields are in InstanceKlass.
848 oop InstanceKlass::protection_domain() const {
849 // return the protection_domain from the mirror
850 return java_lang_Class::protection_domain(java_mirror());
851 }
852
853 objArrayOop InstanceKlass::signers() const {
854 // return the signers from the mirror
855 return java_lang_Class::signers(java_mirror());
856 }
857
858 oop InstanceKlass::init_lock() const {
859 // return the init lock from the mirror
860 oop lock = java_lang_Class::init_lock(java_mirror());
861 // Prevent reordering with any access of initialization state
862 OrderAccess::loadload();
863 assert(lock != nullptr || !is_not_initialized(), // initialized or in_error state
864 "only fully initialized state can have a null lock");
865 return lock;
866 }
867
868 // Set the initialization lock to null so the object can be GC'ed. Any threads
869 // racing to get this lock will see a null lock and will not lock.
870 // That's okay because they all check for initialized state after getting
871 // the lock and return.
872 void InstanceKlass::fence_and_clear_init_lock() {
873 // make sure previous stores are all done, notably the init_state.
874 OrderAccess::storestore();
875 java_lang_Class::clear_init_lock(java_mirror());
876 assert(!is_not_initialized(), "class must be initialized now");
877 }
878
879
880 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
881 // process. The step comments refer to the procedure described in that section.
882 // Note: implementation moved to static method to expose the this pointer.
883 void InstanceKlass::initialize(TRAPS) {
884 if (this->should_be_initialized()) {
885 initialize_impl(CHECK);
886 // Note: at this point the class may be initialized
887 // OR it may be in the state of being initialized
888 // in case of recursive initialization!
889 } else {
890 assert(is_initialized(), "sanity check");
891 }
892 }
893
894 #ifdef ASSERT
895 void InstanceKlass::assert_no_clinit_will_run_for_aot_initialized_class() const {
896 assert(has_aot_initialized_mirror(), "must be");
897
898 InstanceKlass* s = super();
899 if (s != nullptr) {
900 DEBUG_ONLY(ResourceMark rm);
901 assert(s->is_initialized(), "super class %s of aot-inited class %s must have been initialized",
902 s->external_name(), external_name());
903 s->assert_no_clinit_will_run_for_aot_initialized_class();
904 }
905
906 Array<InstanceKlass*>* interfaces = local_interfaces();
907 int len = interfaces->length();
908 for (int i = 0; i < len; i++) {
909 InstanceKlass* intf = interfaces->at(i);
910 if (!intf->is_initialized()) {
911 ResourceMark rm;
912 // Note: an interface needs to be marked as is_initialized() only if
913 // - it has a <clinit>
914 // - it has declared a default method.
915 assert(!intf->interface_needs_clinit_execution_as_super(/*also_check_supers*/false),
916 "uninitialized super interface %s of aot-inited class %s must not have <clinit>",
917 intf->external_name(), external_name());
918 }
919 }
920 }
921 #endif
922
923 #if INCLUDE_CDS
924 void InstanceKlass::initialize_with_aot_initialized_mirror(TRAPS) {
925 assert(has_aot_initialized_mirror(), "must be");
926 assert(CDSConfig::is_loading_heap(), "must be");
927 assert(CDSConfig::is_using_aot_linked_classes(), "must be");
928 assert_no_clinit_will_run_for_aot_initialized_class();
929
930 if (is_initialized()) {
931 return;
932 }
933
934 if (is_runtime_setup_required()) {
935 // Need to take the slow path, which will call the runtimeSetup() function instead
936 // of <clinit>
937 initialize(CHECK);
938 return;
939 }
940 if (log_is_enabled(Info, aot, init)) {
941 ResourceMark rm;
942 log_info(aot, init)("%s (aot-inited)", external_name());
943 }
944
945 link_class(CHECK);
946
947 #ifdef ASSERT
948 {
949 Handle h_init_lock(THREAD, init_lock());
950 ObjectLocker ol(h_init_lock, THREAD);
951 assert(!is_initialized(), "sanity");
952 assert(!is_being_initialized(), "sanity");
953 assert(!is_in_error_state(), "sanity");
954 }
955 #endif
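
// The aot-initialized mirror already holds the statics produced when the archive was
// assembled, so the class can be marked fully_initialized without running <clinit>.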
956
957 set_init_thread(THREAD);
958 set_initialization_state_and_notify(fully_initialized, CHECK);
959 }
960 #endif
961
962 bool InstanceKlass::verify_code(TRAPS) {
963 // 1) Verify the bytecodes
964 return Verifier::verify(this, should_verify_class(), THREAD);
965 }
966
967 static void load_classes_from_loadable_descriptors_attribute(InstanceKlass *ik, TRAPS) {
968 ResourceMark rm(THREAD);
969 if (ik->loadable_descriptors() != nullptr && PreloadClasses) {
970 HandleMark hm(THREAD);
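// Entries in the LoadableDescriptors attribute are descriptors; only those with an
// object envelope ("L<classname>;") name a class that can be resolved here.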
971 for (int i = 0; i < ik->loadable_descriptors()->length(); i++) {
972 Symbol* sig = ik->constants()->symbol_at(ik->loadable_descriptors()->at(i));
973 if (!Signature::has_envelope(sig)) continue;
974 TempNewSymbol class_name = Signature::strip_envelope(sig);
975 if (class_name == ik->name()) continue;
976 log_info(class, preload)("Preloading of class %s during linking of class %s "
977 "because of the class is listed in the LoadableDescriptors attribute",
978 sig->as_C_string(), ik->name()->as_C_string());
979 oop loader = ik->class_loader();
980 Klass* klass = SystemDictionary::resolve_or_null(class_name,
981 Handle(THREAD, loader), THREAD);
982 if (HAS_PENDING_EXCEPTION) {
983 CLEAR_PENDING_EXCEPTION;
984 }
985 if (klass != nullptr) {
986 log_info(class, preload)("Preloading of class %s during linking of class %s "
987 "(cause: LoadableDescriptors attribute) succeeded",
988 class_name->as_C_string(), ik->name()->as_C_string());
989 if (!klass->is_inline_klass()) {
990 // Non-value classes are allowed by the current spec, but this could be an indication
991 // of an issue, so let's log a warning
992 log_warning(class, preload)("Preloading of class %s during linking of class %s "
993 "(cause: LoadableDescriptors attribute) but loaded class is not a value class",
994 class_name->as_C_string(), ik->name()->as_C_string());
995 }
996 } else {
997 log_warning(class, preload)("Preloading of class %s during linking of class %s "
998 "(cause: LoadableDescriptors attribute) failed",
999 class_name->as_C_string(), ik->name()->as_C_string());
1000 }
1001 }
1002 }
1003 }
1004
1005 void InstanceKlass::link_class(TRAPS) {
1006 assert(is_loaded(), "must be loaded");
1007 if (!is_linked()) {
1008 link_class_impl(CHECK);
1009 }
1010 }
1011
1012 // Called to verify that a class can link during initialization, without
1013 // throwing a VerifyError.
1014 bool InstanceKlass::link_class_or_fail(TRAPS) {
1015 assert(is_loaded(), "must be loaded");
1016 if (!is_linked()) {
1017 link_class_impl(CHECK_false);
1018 }
1019 return is_linked();
1020 }
1021
1022 bool InstanceKlass::link_class_impl(TRAPS) {
1023 if (CDSConfig::is_dumping_static_archive() && SystemDictionaryShared::has_class_failed_verification(this)) {
1024 // This is for CDS static dump only -- we use the in_error_state to indicate that
1025 // the class has failed verification. Throwing the NoClassDefFoundError here is just
1026 // a convenient way to stop repeat attempts to verify the same (bad) class.
1027 //
1028 // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
1029 // if we are executing Java code. This is not a problem for CDS dumping phase since
1030 // it doesn't execute any Java code.
1031 ResourceMark rm(THREAD);
1032 // Names are all known to be < 64k so we know this formatted message is not excessively large.
1033 Exceptions::fthrow(THREAD_AND_LOCATION,
1034 vmSymbols::java_lang_NoClassDefFoundError(),
1035 "Class %s, or one of its supertypes, failed class initialization",
1036 external_name());
1037 return false;
1038 }
1039 // return if already verified
1040 if (is_linked()) {
1041 return true;
1042 }
1043
1044 // Timing
1045 // timer handles recursion
1046 JavaThread* jt = THREAD;
1047
1048 // link super class before linking this class
1049 InstanceKlass* super_klass = super();
1050 if (super_klass != nullptr) {
1051 if (super_klass->is_interface()) { // check if super class is an interface
1052 ResourceMark rm(THREAD);
1053 // Names are all known to be < 64k so we know this formatted message is not excessively large.
1054 Exceptions::fthrow(
1055 THREAD_AND_LOCATION,
1056 vmSymbols::java_lang_IncompatibleClassChangeError(),
1057 "class %s has interface %s as super class",
1058 external_name(),
1059 super_klass->external_name()
1060 );
1061 return false;
1062 }
1063
1064 super_klass->link_class_impl(CHECK_false);
1065 }
1066
1067 // link all interfaces implemented by this class before linking this class
1068 Array<InstanceKlass*>* interfaces = local_interfaces();
1069 int num_interfaces = interfaces->length();
1070 for (int index = 0; index < num_interfaces; index++) {
1071 InstanceKlass* interk = interfaces->at(index);
1072 interk->link_class_impl(CHECK_false);
1073 }
1074
1075 if (EnableValhalla) {
1076 // Aggressively preload all classes from the LoadableDescriptors attribute
1077 // so inline classes can be scalarized in the calling conventions computed below
1078 load_classes_from_loadable_descriptors_attribute(this, THREAD);
1079 assert(!HAS_PENDING_EXCEPTION, "Shouldn't have pending exceptions from call above");
1080 }
1081
1082 // in case the class is linked in the process of linking its superclasses
1083 if (is_linked()) {
1084 return true;
1085 }
1086
1087 // trace only the link time for this klass that includes
1088 // the verification time
1089 PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
1090 ClassLoader::perf_class_link_selftime(),
1091 ClassLoader::perf_classes_linked(),
1092 jt->get_thread_stat()->perf_recursion_counts_addr(),
1093 jt->get_thread_stat()->perf_timers_addr(),
1094 PerfClassTraceTime::CLASS_LINK);
1095
1096 // verification & rewriting
1097 {
1098 HandleMark hm(THREAD);
1099 Handle h_init_lock(THREAD, init_lock());
1100 ObjectLocker ol(h_init_lock, jt);
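// Holding the init lock serializes concurrent attempts to link this class; the
// is_linked()/is_rewritten() re-checks below handle threads that lost the race.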
1101 // rewritten will have been set if loader constraint error found
1102 // on an earlier link attempt
1103 // don't verify or rewrite if already rewritten
1104 //
1105
1106 if (!is_linked()) {
1107 if (!is_rewritten()) {
1108 if (in_aot_cache()) {
1109 assert(!verified_at_dump_time(), "must be");
1110 }
1111 {
1112 bool verify_ok = verify_code(THREAD);
1113 if (!verify_ok) {
1114 return false;
1115 }
1116 }
1117
1118 // Just in case a side-effect of verify linked this class already
1119 // (which can sometimes happen since the verifier loads classes
1120 // using custom class loaders, which are free to initialize things)
1121 if (is_linked()) {
1122 return true;
1123 }
1124
1125 // also sets rewritten
1126 rewrite_class(CHECK_false);
1127 } else if (in_aot_cache()) {
1128 SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
1129 }
1130
1131 // relocate jsrs and link methods after they are all rewritten
1132 link_methods(CHECK_false);
1133
1134 // Initialize the vtable and interface table after
1135 // methods have been rewritten since rewrite may
1136 // fabricate new Method*s.
1137 // also does loader constraint checking
1138 //
1139 // initialize_vtable and initialize_itable need to be rerun
1140 // for a shared class if
1141 // 1) the class is loaded by custom class loader or
1142 // 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
1143 // 3) the class was not verified during dump time
1144 bool need_init_table = true;
1145 if (in_aot_cache() && verified_at_dump_time() &&
1146 SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
1147 need_init_table = false;
1148 }
1149 if (need_init_table) {
1150 vtable().initialize_vtable_and_check_constraints(CHECK_false);
1151 itable().initialize_itable_and_check_constraints(CHECK_false);
1152 }
1153 #ifdef ASSERT
1154 vtable().verify(tty, true);
1155 // In case itable verification is ever added.
1156 // itable().verify(tty, true);
1157 #endif
1158 if (Universe::is_fully_initialized()) {
1159 DeoptimizationScope deopt_scope;
1160 {
1161 // Now mark all code that assumes the class is not linked.
1162 // Set state under the Compile_lock also.
1163 MutexLocker ml(THREAD, Compile_lock);
1164
1165 set_init_state(linked);
1166 CodeCache::mark_dependents_on(&deopt_scope, this);
1167 }
1168 // Perform the deopt handshake outside Compile_lock.
1169 deopt_scope.deoptimize_marked();
1170 } else {
1171 set_init_state(linked);
1172 }
1173 if (JvmtiExport::should_post_class_prepare()) {
1174 JvmtiExport::post_class_prepare(THREAD, this);
1175 }
1176 }
1177 }
1178 return true;
1179 }
1180
1181 // Rewrite the byte codes of all of the methods of a class.
1182 // The rewriter must be called exactly once. Rewriting must happen after
1183 // verification but before the first method of the class is executed.
1184 void InstanceKlass::rewrite_class(TRAPS) {
1185 assert(is_loaded(), "must be loaded");
1186 if (is_rewritten()) {
1187 assert(in_aot_cache(), "rewriting an unshared class?");
1188 return;
1189 }
1190 Rewriter::rewrite(this, CHECK);
1191 set_rewritten();
1192 }
1193
1194 // Now relocate and link method entry points after class is rewritten.
1195 // This is outside is_rewritten flag. In case of an exception, it can be
1196 // executed more than once.
1197 void InstanceKlass::link_methods(TRAPS) {
1198 PerfTraceTime timer(ClassLoader::perf_ik_link_methods_time());
1199
1200 int len = methods()->length();
1201 for (int i = len-1; i >= 0; i--) {
1202 methodHandle m(THREAD, methods()->at(i));
1203
1204 // Set up method entry points for compiler and interpreter.
1205 m->link_method(m, CHECK);
1206 }
1207 }
1208
1209 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1210 void InstanceKlass::initialize_super_interfaces(TRAPS) {
1211 assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1212 for (int i = 0; i < local_interfaces()->length(); ++i) {
1213 InstanceKlass* ik = local_interfaces()->at(i);
1214
1215 // Initialization is a depth-first search, i.e. we start at the top of the inheritance tree.
1216 // has_nonstatic_concrete_methods drives searching superinterfaces since it is set
1217 // whenever a superinterface in the hierarchy has nonstatic concrete methods
1218 if (ik->has_nonstatic_concrete_methods()) {
1219 ik->initialize_super_interfaces(CHECK);
1220 }
1221
1222 // Only initialize() interfaces that "declare" concrete methods.
1223 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1224 ik->initialize(CHECK);
1225 }
1226 }
1227 }
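
// Table recording, per class, the exception that caused initialization to fail, so
// that later initialization attempts can report the original cause.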
1228
1229 using InitializationErrorTable = HashTable<const InstanceKlass*, OopHandle, 107, AnyObj::C_HEAP, mtClass>;
1230 static InitializationErrorTable* _initialization_error_table;
1231
1232 void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
1233 // Create the same exception with a message indicating the thread name,
1234 // and the StackTraceElements.
1235 Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
1236 ResourceMark rm(current);
1237 if (init_error.is_null()) {
1238 log_trace(class, init)("Unable to create the desired initialization error for class %s", external_name());
1239
1240 // We failed to create the new exception, most likely due to either out-of-memory or
1241 // a stackoverflow error. If the original exception was either of those then we save
1242 // the shared, pre-allocated, stackless, instance of that exception.
1243 if (exception->klass() == vmClasses::StackOverflowError_klass()) {
1244 log_debug(class, init)("Using shared StackOverflowError as initialization error for class %s", external_name());
1245 init_error = Handle(current, Universe::class_init_stack_overflow_error());
1246 } else if (exception->klass() == vmClasses::OutOfMemoryError_klass()) {
1247 log_debug(class, init)("Using shared OutOfMemoryError as initialization error for class %s", external_name());
1248 init_error = Handle(current, Universe::class_init_out_of_memory_error());
1249 } else {
1250 return;
1251 }
1252 }
1253
1254 MutexLocker ml(current, ClassInitError_lock);
1255 OopHandle elem = OopHandle(Universe::vm_global(), init_error());
1256 bool created;
1257 if (_initialization_error_table == nullptr) {
1258 _initialization_error_table = new (mtClass) InitializationErrorTable();
1259 }
1260 _initialization_error_table->put_if_absent(this, elem, &created);
1261 assert(created, "Initialization is single threaded");
1262 log_trace(class, init)("Initialization error added for class %s", external_name());
1263 }
1264
1265 oop InstanceKlass::get_initialization_error(JavaThread* current) {
1266 MutexLocker ml(current, ClassInitError_lock);
1267 if (_initialization_error_table == nullptr) {
1268 return nullptr;
1269 }
1270 OopHandle* h = _initialization_error_table->get(this);
1271 return (h != nullptr) ? h->resolve() : nullptr;
1272 }
1273
1274 // Need to remove entries for unloaded classes.
1275 void InstanceKlass::clean_initialization_error_table() {
1276 struct InitErrorTableCleaner {
1277 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1278 if (!ik->is_loader_alive()) {
1279 h.release(Universe::vm_global());
1280 return true;
1281 } else {
1282 return false;
1283 }
1284 }
1285 };
1286
1287 assert_locked_or_safepoint(ClassInitError_lock);
1288 InitErrorTableCleaner cleaner;
1289 if (_initialization_error_table != nullptr) {
1290 _initialization_error_table->unlink(&cleaner);
1291 }
1292 }
1293
1294 void InstanceKlass::initialize_impl(TRAPS) {
1295 HandleMark hm(THREAD);
1296
1297 // Make sure klass is linked (verified) before initialization
1298 // A class could already be verified, since it has been reflected upon.
1299 link_class(CHECK);
1300
1301 DTRACE_CLASSINIT_PROBE(required, -1);
1302
1303 bool wait = false;
1304
1305 JavaThread* jt = THREAD;
1306
1307 bool debug_logging_enabled = log_is_enabled(Debug, class, init);
1308
1309 // refer to the JVM book page 47 for description of steps
1310 // Step 1
1311 {
1312 Handle h_init_lock(THREAD, init_lock());
1313 ObjectLocker ol(h_init_lock, jt);
1314
1315 // Step 2
1316 // If we were to use wait() instead of waitInterruptibly() then
1317 // we might end up throwing IE from link/symbol resolution sites
1318 // that aren't expected to throw. This would wreak havoc. See 6320309.
1319 while (is_being_initialized() && !is_reentrant_initialization(jt)) {
1320 if (debug_logging_enabled) {
1321 ResourceMark rm(jt);
1322 log_debug(class, init)("Thread \"%s\" waiting for initialization of %s by thread \"%s\"",
1323 jt->name(), external_name(), init_thread_name());
1324 }
1325 wait = true;
1326 jt->set_class_to_be_initialized(this);
1327 ol.wait_uninterruptibly(jt);
1328 jt->set_class_to_be_initialized(nullptr);
1329 }
1330
1331 // Step 3
1332 if (is_being_initialized() && is_reentrant_initialization(jt)) {
1333 if (debug_logging_enabled) {
1334 ResourceMark rm(jt);
1335 log_debug(class, init)("Thread \"%s\" recursively initializing %s",
1336 jt->name(), external_name());
1337 }
1338 DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
1339 return;
1340 }
1341
1342 // Step 4
1343 if (is_initialized()) {
1344 if (debug_logging_enabled) {
1345 ResourceMark rm(jt);
1346 log_debug(class, init)("Thread \"%s\" found %s already initialized",
1347 jt->name(), external_name());
1348 }
1349 DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
1350 return;
1351 }
1352
1353 // Step 5
1354 if (is_in_error_state()) {
1355 if (debug_logging_enabled) {
1356 ResourceMark rm(jt);
1357 log_debug(class, init)("Thread \"%s\" found %s is in error state",
1358 jt->name(), external_name());
1359 }
1360
1361 DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
1362 ResourceMark rm(THREAD);
1363 Handle cause(THREAD, get_initialization_error(THREAD));
1364
1365 stringStream ss;
1366 ss.print("Could not initialize class %s", external_name());
1367 if (cause.is_null()) {
1368 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
1369 } else {
1370 THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
1371 ss.as_string(), cause);
1372 }
1373 } else {
1374
1375 // Step 6
1376 set_init_state(being_initialized);
1377 set_init_thread(jt);
1378 if (debug_logging_enabled) {
1379 ResourceMark rm(jt);
1380 log_debug(class, init)("Thread \"%s\" is initializing %s",
1381 jt->name(), external_name());
1382 }
1383 }
1384 }
1385
1386 // Pre-allocating an all-zero value to be used to reset nullable flat storages
1387 if (is_inline_klass()) {
1388 InlineKlass* vk = InlineKlass::cast(this);
1389 if (vk->has_nullable_atomic_layout()) {
1390 oop val = vk->allocate_instance(THREAD);
1391 if (HAS_PENDING_EXCEPTION) {
1392 Handle e(THREAD, PENDING_EXCEPTION);
1393 CLEAR_PENDING_EXCEPTION;
1394 {
1395 EXCEPTION_MARK;
1396 add_initialization_error(THREAD, e);
1397 // Locks object, set state, and notify all waiting threads
1398 set_initialization_state_and_notify(initialization_error, THREAD);
1399 CLEAR_PENDING_EXCEPTION;
1400 }
1401 THROW_OOP(e());
1402 }
1403 vk->set_null_reset_value(val);
1404 }
1405 }
1406
1407 // Step 7
1408 // Next, if C is a class rather than an interface, initialize its super class and super
1409 // interfaces.
1410 if (!is_interface()) {
1411 Klass* super_klass = super();
1412 if (super_klass != nullptr && super_klass->should_be_initialized()) {
1413 super_klass->initialize(THREAD);
1414 }
1415 // If C implements any interface that declares a non-static, concrete method,
1416 // the initialization of C triggers initialization of its super interfaces.
1417 // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
1418 // having a superinterface that declares, non-static, concrete methods
1419 if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
1420 initialize_super_interfaces(THREAD);
1421 }
1422
1423 // If any exceptions, complete abruptly, throwing the same exception as above.
1424 if (HAS_PENDING_EXCEPTION) {
1425 Handle e(THREAD, PENDING_EXCEPTION);
1426 CLEAR_PENDING_EXCEPTION;
1427 {
1428 EXCEPTION_MARK;
1429 add_initialization_error(THREAD, e);
1430 // Locks object, set state, and notify all waiting threads
1431 set_initialization_state_and_notify(initialization_error, THREAD);
1432 CLEAR_PENDING_EXCEPTION;
1433 }
1434 DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1435 THROW_OOP(e());
1436 }
1437 }
1438
1439 // Step 8
1440 {
1441 DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1442 if (class_initializer() != nullptr) {
1443 // Timer includes any side effects of class initialization (resolution,
1444 // etc), but not recursive entry into call_class_initializer().
1445 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1446 ClassLoader::perf_class_init_selftime(),
1447 ClassLoader::perf_classes_inited(),
1448 jt->get_thread_stat()->perf_recursion_counts_addr(),
1449 jt->get_thread_stat()->perf_timers_addr(),
1450 PerfClassTraceTime::CLASS_CLINIT);
1451 call_class_initializer(THREAD);
1452 } else {
1453 // The elapsed time is so small it's not worth counting.
1454 if (UsePerfData) {
1455 ClassLoader::perf_classes_inited()->inc();
1456 }
1457 call_class_initializer(THREAD);
1458 }
1459
1460 if (has_strict_static_fields() && !HAS_PENDING_EXCEPTION) {
1461 // Step 9 also verifies that strict static fields have been initialized.
1462 // Status bits were set in ClassFileParser::post_process_parsed_stream.
1463 // After <clinit>, bits must all be clear, or else we must throw an error.
1464 // This is an extremely fast check, so we won't bother with a timer.
1465 assert(fields_status() != nullptr, "");
1466 Symbol* bad_strict_static = nullptr;
1467 for (int index = 0; index < fields_status()->length(); index++) {
1468 // Very fast loop over single byte array looking for a set bit.
1469 if (fields_status()->adr_at(index)->is_strict_static_unset()) {
1470 // This strict static field has not been set by the class initializer.
1471 // Note that in the common no-error case, we read no field metadata.
1472 // We only unpack it when we need to report an error.
1473 FieldInfo fi = field(index);
1474 bad_strict_static = fi.name(constants());
1475 if (debug_logging_enabled) {
1476 ResourceMark rm(jt);
1477 const char* msg = format_strict_static_message(bad_strict_static);
1478 log_debug(class, init)("%s", msg);
1479 } else {
1480 // If we are not logging, do not bother to look for a second offense.
1481 break;
1482 }
1483 }
1484 }
1485 if (bad_strict_static != nullptr) {
1486 throw_strict_static_exception(bad_strict_static, "is unset after initialization of", THREAD);
1487 }
1488 }
1489 }
1490
1491 // Step 9
1492 if (!HAS_PENDING_EXCEPTION) {
1493 set_initialization_state_and_notify(fully_initialized, CHECK);
1494 DEBUG_ONLY(vtable().verify(tty, true);)
1495 CompilationPolicy::replay_training_at_init(this, THREAD);
1496 }
1497 else {
1498 // Step 10 and 11
1499 Handle e(THREAD, PENDING_EXCEPTION);
1500 CLEAR_PENDING_EXCEPTION;
1501 // JVMTI has already reported the pending exception
1502 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1503 JvmtiExport::clear_detected_exception(jt);
1504 {
1505 EXCEPTION_MARK;
1506 add_initialization_error(THREAD, e);
1507 set_initialization_state_and_notify(initialization_error, THREAD);
1508 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
1509 // JVMTI has already reported the pending exception
1510 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1511 JvmtiExport::clear_detected_exception(jt);
1512 }
1513 DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1514 if (e->is_a(vmClasses::Error_klass())) {
1515 THROW_OOP(e());
1516 } else {
1517 JavaCallArguments args(e);
1518 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
1519 vmSymbols::throwable_void_signature(),
1520 &args);
1521 }
1522 }
1523 DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1524 }
1525
1526
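// Update the initialization state under the init lock (when it still exists) and
// notify all threads waiting on that lock so they can re-check the new state.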
1527 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
1528 Handle h_init_lock(THREAD, init_lock());
1529 if (h_init_lock() != nullptr) {
1530 ObjectLocker ol(h_init_lock, THREAD);
1531 set_init_thread(nullptr); // reset _init_thread before changing _init_state
1532 set_init_state(state);
1533 fence_and_clear_init_lock();
1534 ol.notify_all(CHECK);
1535 } else {
1536 assert(h_init_lock() != nullptr, "The initialization state should never be set twice");
1537 set_init_thread(nullptr); // reset _init_thread before changing _init_state
1538 set_init_state(state);
1539 }
1540 }
1541
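// Bookkeeping hook for accesses to strict static fields during class initialization:
// a write clears the field's "unset" status, a read before any write raises an error,
// and for final strict statics a write after the field has been read is also rejected.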
1542 void InstanceKlass::notify_strict_static_access(int field_index, bool is_writing, TRAPS) {
1543 guarantee(field_index >= 0 && field_index < fields_status()->length(), "valid field index");
1544 DEBUG_ONLY(FieldInfo debugfi = field(field_index));
1545 assert(debugfi.access_flags().is_strict(), "");
1546 assert(debugfi.access_flags().is_static(), "");
1547 FieldStatus& fs = *fields_status()->adr_at(field_index);
1548 LogTarget(Trace, class, init) lt;
1549 if (lt.is_enabled()) {
1550 ResourceMark rm(THREAD);
1551 LogStream ls(lt);
1552 FieldInfo fi = field(field_index);
1553 ls.print("notify %s %s %s%s ",
1554 external_name(), is_writing? "Write" : "Read",
1555 fs.is_strict_static_unset() ? "Unset" : "(set)",
1556 fs.is_strict_static_unread() ? "+Unread" : "");
1557 fi.print(&ls, constants());
1558 }
1559 if (fs.is_strict_static_unset()) {
1560 assert(fs.is_strict_static_unread(), "ClassFileParser resp.");
1561 // If it is not set, there are only two reasonable things we can do here:
1562 // - mark it set if this is putstatic
1563 // - throw an error (Read-Before-Write) if this is getstatic
1564
1565 // The unset state is (or should be) transient, and observable only by the
1566 // initializing thread during the execution of <clinit>. If this access comes
1567 // from any other context, something is wrong.
1568 guarantee(is_reentrant_initialization(THREAD), "unscoped access to strict static");
1569 if (is_writing) {
1570 // clear the "unset" bit, since the field is actually going to be written
1571 fs.update_strict_static_unset(false);
1572 } else {
1573 // throw an IllegalStateException, since we are reading before writing
1574 // see also InstanceKlass::initialize_impl, Step 8 (at end)
1575 Symbol* bad_strict_static = field(field_index).name(constants());
1576 throw_strict_static_exception(bad_strict_static, "is unset before first read in", CHECK);
1577 }
1578 } else {
1579 // Ensure no write after read for final strict statics
1580 FieldInfo fi = field(field_index);
1581 bool is_final = fi.access_flags().is_final();
1582 if (is_final) {
1583 // no final write after read, so observing a constant freezes it, as if <clinit> ended early
1584 // (maybe we could trust the constant a little earlier, before <clinit> ends)
1585 if (is_writing && !fs.is_strict_static_unread()) {
1586 Symbol* bad_strict_static = fi.name(constants());
1587 throw_strict_static_exception(bad_strict_static, "is set after read (as final) in", CHECK);
1588 } else if (!is_writing && fs.is_strict_static_unread()) {
1589 fs.update_strict_static_unread(false);
1590 }
1591 }
1592 }
1593 }
1594
1595 void InstanceKlass::throw_strict_static_exception(Symbol* field_name, const char* when, TRAPS) {
1596 ResourceMark rm(THREAD);
1597 const char* msg = format_strict_static_message(field_name, when);
1598 THROW_MSG(vmSymbols::java_lang_IllegalStateException(), msg);
1599 }
1600
1601 const char* InstanceKlass::format_strict_static_message(Symbol* field_name, const char* when) {
1602 stringStream ss;
1603 ss.print("Strict static \"%s\" %s %s",
1604 field_name->as_C_string(),
1605 when == nullptr ? "is unset in" : when,
1606 external_name());
1607 return ss.as_string();
1608 }
1609
1610 // Update hierarchy. This is done before the new klass has been added to the SystemDictionary. The Compile_lock
1611 // is grabbed to ensure that the compiler is not using the class hierarchy.
1612 void InstanceKlass::add_to_hierarchy(JavaThread* current) {
1613 assert(!SafepointSynchronize::is_at_safepoint(), "must NOT be at safepoint");
1614
1615 DeoptimizationScope deopt_scope;
1616 {
1617 MutexLocker ml(current, Compile_lock);
1618
1619 set_init_state(InstanceKlass::loaded);
1620 // Make sure the init_state store above is complete before linking into the hierarchy.
1621 // The compiler reads the hierarchy outside of the Compile_lock,
1622 // so access ordering is used when adding to the hierarchy.
1623
1624 // Link into hierarchy.
1625 append_to_sibling_list(); // add to superklass/sibling list
1626 process_interfaces(); // handle all "implements" declarations
1627
1628 // Now mark all code that depended on old class hierarchy.
1629 // Note: must be done *after* linking k into the hierarchy (was bug 12/9/97)
1630 if (Universe::is_fully_initialized()) {
1631 CodeCache::mark_dependents_on(&deopt_scope, this);
1632 }
1633 }
1634 // Perform the deopt handshake outside Compile_lock.
1635 deopt_scope.deoptimize_marked();
1636 }
1637
1638
1639 InstanceKlass* InstanceKlass::implementor() const {
1640 InstanceKlass* volatile* ik = adr_implementor();
1641 if (ik == nullptr) {
1642 return nullptr;
1643 } else {
1644 // This load races with inserts, and therefore needs acquire.
1645 InstanceKlass* ikls = AtomicAccess::load_acquire(ik);
1646 if (ikls != nullptr && !ikls->is_loader_alive()) {
1647 return nullptr; // don't return unloaded class
1648 } else {
1649 return ikls;
1650 }
1651 }
1652 }
1653
1654
1655 void InstanceKlass::set_implementor(InstanceKlass* ik) {
1656 assert_locked_or_safepoint(Compile_lock);
1657 assert(is_interface(), "not interface");
1658 InstanceKlass* volatile* addr = adr_implementor();
1659 assert(addr != nullptr, "null addr");
1660 if (addr != nullptr) {
1661 AtomicAccess::release_store(addr, ik);
1662 }
1663 }
1664
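// Returns 0, 1, or 2. The embedded _implementor field can only record "none" (null),
// "exactly one" (that implementor), or "two or more" (the interface itself),
// so a result of 2 means "at least two".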
1665 int InstanceKlass::nof_implementors() const {
1666 InstanceKlass* ik = implementor();
1667 if (ik == nullptr) {
1668 return 0;
1669 } else if (ik != this) {
1670 return 1;
1671 } else {
1672 return 2;
1673 }
1674 }
1675
1676 // The embedded _implementor field can only record one implementor.
1677 // When there is more than one implementor, the _implementor field
1678 // is set to the interface Klass* itself. The following are the possible
1679 // values for the _implementor field:
1680 // null - no implementor
1681 // implementor Klass* - one implementor
1682 // self - more than one implementor
1683 //
1684 // The _implementor field only exists for interfaces.
1685 void InstanceKlass::add_implementor(InstanceKlass* ik) {
1686 if (Universe::is_fully_initialized()) {
1687 assert_lock_strong(Compile_lock);
1688 }
1689 assert(is_interface(), "not interface");
1690 // Filter out my subinterfaces.
1691 // (Note: Interfaces are never on the subklass list.)
1692 if (ik->is_interface()) return;
1693
1694 // Filter out subclasses whose supers already implement me.
1695 // (Note: CHA must walk subclasses of direct implementors
1696 // in order to locate indirect implementors.)
1697 InstanceKlass* super_ik = ik->super();
1698 if (super_ik != nullptr && super_ik->implements_interface(this))
1699 // We only need to check one immediate superclass, since the
1700 // implements_interface query looks at transitive_interfaces.
1701 // Any supers of the super have the same (or fewer) transitive_interfaces.
1702 return;
1703
1704 InstanceKlass* iklass = implementor();
1705 if (iklass == nullptr) {
1706 set_implementor(ik);
1707 } else if (iklass != this && iklass != ik) {
1708 // There is already an implementor. Use the interface itself as an indicator of
1709 // more than one implementor.
1710 set_implementor(this);
1711 }
1712
1713 // The implementor also implements the transitive_interfaces
1714 for (int index = 0; index < local_interfaces()->length(); index++) {
1715 local_interfaces()->at(index)->add_implementor(ik);
1716 }
1717 }
1718
1719 void InstanceKlass::init_implementor() {
1720 if (is_interface()) {
1721 set_implementor(nullptr);
1722 }
1723 }
1724
1725
1726 void InstanceKlass::process_interfaces() {
1727 // link this class into the implementors list of every interface it implements
1728 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1729 assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1730 InstanceKlass* interf = local_interfaces()->at(i);
1731 assert(interf->is_interface(), "expected interface");
1732 interf->add_implementor(this);
1733 }
1734 }
1735
1736 bool InstanceKlass::can_be_primary_super_slow() const {
1737 if (is_interface())
1738 return false;
1739 else
1740 return Klass::can_be_primary_super_slow();
1741 }
1742
1743 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1744 Array<InstanceKlass*>* transitive_interfaces) {
1745 // The secondaries are the implemented interfaces.
1746 // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1747 // (but it's safe to do here because we won't write into _secondary_supers from this point on).
1748 Array<Klass*>* interfaces = (Array<Klass*>*)(address)transitive_interfaces;
1749 int num_secondaries = num_extra_slots + interfaces->length();
1750 if (num_secondaries == 0) {
1751 // Must share this for correct bootstrapping!
1752 set_secondary_supers(Universe::the_empty_klass_array(), Universe::the_empty_klass_bitmap());
1753 return nullptr;
1754 } else if (num_extra_slots == 0 && interfaces->length() <= 1) {
1755 // We will reuse the transitive interfaces list if we're certain
1756 // it's in hash order.
1757 uintx bitmap = compute_secondary_supers_bitmap(interfaces);
1758 set_secondary_supers(interfaces, bitmap);
1759 return nullptr;
1760 }
1761 // Copy transitive interfaces to a temporary growable array to be constructed
1762 // into the secondary super list with extra slots.
1763 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1764 for (int i = 0; i < interfaces->length(); i++) {
1765 secondaries->push(interfaces->at(i));
1766 }
1767 return secondaries;
1768 }
1769
1770 bool InstanceKlass::implements_interface(Klass* k) const {
1771 if (this == k) return true;
1772 assert(k->is_interface(), "should be an interface class");
1773 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1774 if (transitive_interfaces()->at(i) == k) {
1775 return true;
1776 }
1777 }
1778 return false;
1779 }
1780
1781 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1782 // Verify direct super interface
1783 if (this == k) return true;
1784 assert(k->is_interface(), "should be an interface class");
1785 for (int i = 0; i < local_interfaces()->length(); i++) {
1786 if (local_interfaces()->at(i) == k) {
1787 return true;
1788 }
1789 }
1790 return false;
1791 }
1792
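// Register a newly allocated instance for finalization by calling the registration
// method cached by the Universe (java.lang.ref.Finalizer.register). A handle is used
// because the Java call can trigger a safepoint/GC and move the object.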
1793 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
1794 if (TraceFinalizerRegistration) {
1795 tty->print("Registered ");
1796 i->print_value_on(tty);
1797 tty->print_cr(" (" PTR_FORMAT ") as finalizable", p2i(i));
1798 }
1799 instanceHandle h_i(THREAD, i);
1800 // Pass the handle as the argument; JavaCalls::call expects oops to be passed as handles (jobjects)
1801 JavaValue result(T_VOID);
1802 JavaCallArguments args(h_i);
1803 methodHandle mh(THREAD, Universe::finalizer_register_method());
1804 JavaCalls::call(&result, mh, &args, CHECK_NULL);
1805 MANAGEMENT_ONLY(FinalizerService::on_register(h_i(), THREAD);)
1806 return h_i();
1807 }
1808
1809 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1810 assert(!is_abstract() && !is_interface(), "Should not create this object");
1811 size_t size = size_helper(); // Query before forming handle.
1812 return (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1813 }
1814
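// Allocate an instance of the class represented by the given java.lang.Class mirror,
// after checking that it can be instantiated and ensuring it has been initialized.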
1815 instanceOop InstanceKlass::allocate_instance(oop java_class, TRAPS) {
1816 Klass* k = java_lang_Class::as_Klass(java_class);
1817 if (k == nullptr) {
1818 ResourceMark rm(THREAD);
1819 THROW_(vmSymbols::java_lang_InstantiationException(), nullptr);
1820 }
1821 InstanceKlass* ik = cast(k);
1822 ik->check_valid_for_instantiation(false, CHECK_NULL);
1823 ik->initialize(CHECK_NULL);
1824 return ik->allocate_instance(THREAD);
1825 }
1826
1827 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1828 return instanceHandle(THREAD, allocate_instance(THREAD));
1829 }
1830
1831 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1832 if (is_interface() || is_abstract()) {
1833 ResourceMark rm(THREAD);
1834 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1835 : vmSymbols::java_lang_InstantiationException(), external_name());
1836 }
1837 if (this == vmClasses::Class_klass()) {
1838 ResourceMark rm(THREAD);
1839 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1840 : vmSymbols::java_lang_IllegalAccessException(), external_name());
1841 }
1842 }
1843
1844 ArrayKlass* InstanceKlass::array_klass(int n, TRAPS) {
1845 // Need load-acquire for lock-free read
1846 if (array_klasses_acquire() == nullptr) {
1847
1848 // Recursively lock array allocation
1849 RecursiveLocker rl(MultiArray_lock, THREAD);
1850
1851 // Check if another thread created the array klass while we were waiting for the lock.
1852 if (array_klasses() == nullptr) {
1853 ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
1854 // use 'release' to pair with lock-free load
1855 release_set_array_klasses(k);
1856 }
1857 }
1858
1859 // array_klasses() will always be set at this point
1860 ArrayKlass* ak = array_klasses();
1861 assert(ak != nullptr, "should be set");
1862 return ak->array_klass(n, THREAD);
1863 }
1864
1865 ArrayKlass* InstanceKlass::array_klass_or_null(int n) {
1866 // Need load-acquire for lock-free read
1867 ArrayKlass* ak = array_klasses_acquire();
1868 if (ak == nullptr) {
1869 return nullptr;
1870 } else {
1871 return ak->array_klass_or_null(n);
1872 }
1873 }
1874
1875 ArrayKlass* InstanceKlass::array_klass(TRAPS) {
1876 return array_klass(1, THREAD);
1877 }
1878
1879 ArrayKlass* InstanceKlass::array_klass_or_null() {
1880 return array_klass_or_null(1);
1881 }
1882
1883 static int call_class_initializer_counter = 0; // for debugging
1884
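// Returns this class's <clinit> method, or null if there is none
// (or if the matching method does not qualify as a class initializer).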
1885 Method* InstanceKlass::class_initializer() const {
1886 Method* clinit = find_method(
1887 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1888 if (clinit != nullptr && clinit->is_class_initializer()) {
1889 return clinit;
1890 }
1891 return nullptr;
1892 }
1893
1894 void InstanceKlass::call_class_initializer(TRAPS) {
1895 if (ReplayCompiles &&
1896 (ReplaySuppressInitializers == 1 ||
1897 (ReplaySuppressInitializers >= 2 && class_loader() != nullptr))) {
1898 // Hide the existence of the initializer for the purpose of replaying the compile
1899 return;
1900 }
1901
1902 #if INCLUDE_CDS
1903 // This is needed to ensure the consistency of the archived heap objects.
1904 if (has_aot_initialized_mirror() && CDSConfig::is_loading_heap()) {
1905 AOTClassInitializer::call_runtime_setup(THREAD, this);
1906 return;
1907 } else if (has_archived_enum_objs()) {
1908 assert(in_aot_cache(), "must be");
1909 bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK);
1910 if (initialized) {
1911 return;
1912 }
1913 }
1914 #endif
1915
1916 methodHandle h_method(THREAD, class_initializer());
1917 assert(!is_initialized(), "we cannot initialize twice");
1918 LogTarget(Info, class, init) lt;
1919 if (lt.is_enabled()) {
1920 ResourceMark rm(THREAD);
1921 LogStream ls(lt);
1922 ls.print("%d Initializing ", call_class_initializer_counter++);
1923 name()->print_value_on(&ls);
1924 ls.print_cr("%s (" PTR_FORMAT ") by thread \"%s\"",
1925 h_method() == nullptr ? "(no method)" : "", p2i(this),
1926 THREAD->name());
1927 }
1928 if (h_method() != nullptr) {
1929 ThreadInClassInitializer ticl(THREAD, this); // Track class being initialized
1930 JavaCallArguments args; // No arguments
1931 JavaValue result(T_VOID);
1932 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1933 }
1934 }
1935
1936 // If a class that implements this interface is initialized, is the JVM required
1937 // to first execute a <clinit> method declared in this interface,
1938 // or (if also_check_supers==true) any of the super types of this interface?
1939 //
1940 // JVMS 5.5. Initialization, step 7: Next, if C is a class rather than
1941 // an interface, then let SC be its superclass and let SI1, ..., SIn
1942 // be all superinterfaces of C (whether direct or indirect) that
1943 // declare at least one non-abstract, non-static method.
1944 //
1945 // So when an interface is initialized, it does not look at its
1946 // supers. But a proper class will ensure that all of its supers have
1947 // run their <clinit> methods, except that it disregards interfaces
1948 // that lack a non-static concrete method (i.e., a default method).
1949 // Therefore, you should probably call this method only when the
1950 // current class is a super of some proper class, not an interface.
1951 bool InstanceKlass::interface_needs_clinit_execution_as_super(bool also_check_supers) const {
1952 assert(is_interface(), "must be");
1953
1954 if (!has_nonstatic_concrete_methods()) {
1955 // quick check: no nonstatic concrete methods are declared by this or any super interfaces
1956 return false;
1957 }
1958
1959 // JVMS 5.5. Initialization
1960 // ...If C is an interface that declares a non-abstract,
1961 // non-static method, the initialization of a class that
1962 // implements C directly or indirectly.
1963 if (declares_nonstatic_concrete_methods() && class_initializer() != nullptr) {
1964 return true;
1965 }
1966 if (also_check_supers) {
1967 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
1968 for (int i = 0; i < all_ifs->length(); ++i) {
1969 InstanceKlass* super_intf = all_ifs->at(i);
1970 if (super_intf->declares_nonstatic_concrete_methods() && super_intf->class_initializer() != nullptr) {
1971 return true;
1972 }
1973 }
1974 }
1975 return false;
1976 }
1977
1978 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1979 InterpreterOopMap* entry_for) {
1980 // Lazily create the _oop_map_cache at first request.
1981 // Load_acquire is needed to safely get instance published with CAS by another thread.
1982 OopMapCache* oop_map_cache = AtomicAccess::load_acquire(&_oop_map_cache);
1983 if (oop_map_cache == nullptr) {
1984 // Try to install new instance atomically.
1985 oop_map_cache = new OopMapCache();
1986 OopMapCache* other = AtomicAccess::cmpxchg(&_oop_map_cache, (OopMapCache*)nullptr, oop_map_cache);
1987 if (other != nullptr) {
1988 // Someone else managed to install before us, ditch local copy and use the existing one.
1989 delete oop_map_cache;
1990 oop_map_cache = other;
1991 }
1992 }
1993 // _oop_map_cache is constant after init; lookup below does its own locking.
1994 oop_map_cache->lookup(method, bci, entry_for);
1995 }
1996
1997
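// Returns the FieldInfo for the given field index by scanning the field stream;
// calls fatal() if no field with that index exists.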
1998 FieldInfo InstanceKlass::field(int index) const {
1999 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
2000 if (fs.index() == index) {
2001 return fs.to_FieldInfo();
2002 }
2003 }
2004 fatal("Field not found");
2005 return FieldInfo();
2006 }
2007
2008 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2009 JavaFieldStream fs(this);
2010 if (fs.lookup(name, sig)) {
2011 assert(fs.name() == name, "name must match");
2012 assert(fs.signature() == sig, "signature must match");
2013 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2014 return true;
2015 }
2016 return false;
2017 }
2018
2019
2020 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2021 const int n = local_interfaces()->length();
2022 for (int i = 0; i < n; i++) {
2023 InstanceKlass* intf1 = local_interfaces()->at(i);
2024 assert(intf1->is_interface(), "just checking type");
2025 // search for field in current interface
2026 if (intf1->find_local_field(name, sig, fd)) {
2027 assert(fd->is_static(), "interface field must be static");
2028 return intf1;
2029 }
2030 // search for field in direct superinterfaces
2031 Klass* intf2 = intf1->find_interface_field(name, sig, fd);
2032 if (intf2 != nullptr) return intf2;
2033 }
2034 // otherwise field lookup fails
2035 return nullptr;
2036 }
2037
2038
2039 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
2040 // search order according to newest JVM spec (5.4.3.2, p.167).
2041 // 1) search for field in current klass
2042 if (find_local_field(name, sig, fd)) {
2043 return const_cast<InstanceKlass*>(this);
2044 }
2045 // 2) search for field recursively in direct superinterfaces
2046 { Klass* intf = find_interface_field(name, sig, fd);
2047 if (intf != nullptr) return intf;
2048 }
2049 // 3) apply field lookup recursively if superclass exists
2050 { InstanceKlass* supr = super();
2051 if (supr != nullptr) return supr->find_field(name, sig, fd);
2052 }
2053 // 4) otherwise field lookup fails
2054 return nullptr;
2055 }
2056
2057
2058 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
2059 // search order according to newest JVM spec (5.4.3.2, p.167).
2060 // 1) search for field in current klass
2061 if (find_local_field(name, sig, fd)) {
2062 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
2063 }
2064 // 2) search for field recursively in direct superinterfaces
2065 if (is_static) {
2066 Klass* intf = find_interface_field(name, sig, fd);
2067 if (intf != nullptr) return intf;
2068 }
2069 // 3) apply field lookup recursively if superclass exists
2070 { InstanceKlass* supr = super();
2071 if (supr != nullptr) return supr->find_field(name, sig, is_static, fd);
2072 }
2073 // 4) otherwise field lookup fails
2074 return nullptr;
2075 }
2076
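// Returns true if the given offset falls within this klass's fields: for inline klasses
// the flat payload range is checked directly, otherwise a nonstatic field lookup by
// offset is performed.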
2077 bool InstanceKlass::contains_field_offset(int offset) {
2078 if (this->is_inline_klass()) {
2079 InlineKlass* vk = InlineKlass::cast(this);
2080 return offset >= vk->payload_offset() && offset < (vk->payload_offset() + vk->payload_size_in_bytes());
2081 } else {
2082 fieldDescriptor fd;
2083 return find_field_from_offset(offset, false, &fd);
2084 }
2085 }
2086
2087 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
2088 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2089 if (fs.offset() == offset) {
2090 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.to_FieldInfo());
2091 if (fd->is_static() == is_static) return true;
2092 }
2093 }
2094 return false;
2095 }
2096
2097
2098 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
2099 const InstanceKlass* klass = this;
2100 while (klass != nullptr) {
2101 if (klass->find_local_field_from_offset(offset, is_static, fd)) {
2102 return true;
2103 }
2104 klass = klass->super();
2105 }
2106 return false;
2107 }
2108
2109
2110 void InstanceKlass::methods_do(void f(Method* method)) {
2111 // Methods aren't stable until they are loaded. This can be read outside
2112 // a lock through the ClassLoaderData for profiling.
2113 // Redefined scratch classes are on the list and need to be cleaned.
2114 if (!is_loaded() && !is_scratch_class()) {
2115 return;
2116 }
2117
2118 int len = methods()->length();
2119 for (int index = 0; index < len; index++) {
2120 Method* m = methods()->at(index);
2121 assert(m->is_method(), "must be method");
2122 f(m);
2123 }
2124 }
2125
2126
2127 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
2128 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2129 if (fs.access_flags().is_static()) {
2130 fieldDescriptor& fd = fs.field_descriptor();
2131 cl->do_field(&fd);
2132 }
2133 }
2134 }
2135
2136
2137 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
2138 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2139 if (fs.access_flags().is_static()) {
2140 fieldDescriptor& fd = fs.field_descriptor();
2141 f(&fd, mirror, CHECK);
2142 }
2143 }
2144 }
2145
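// Applies the closure to every nonstatic field, including inherited ones;
// superclass fields are visited first.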
2146 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
2147 InstanceKlass* super = this->super();
2148 if (super != nullptr) {
2149 super->do_nonstatic_fields(cl);
2150 }
2151 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
2152 fieldDescriptor& fd = fs.field_descriptor();
2153 if (!fd.is_static()) {
2154 cl->do_field(&fd);
2155 }
2156 }
2157 }
2158
2159 static int compare_fields_by_offset(FieldInfo* a, FieldInfo* b) {
2160 return a->offset() - b->offset();
2161 }
2162
2163 void InstanceKlass::print_nonstatic_fields(FieldClosure* cl) {
2164 InstanceKlass* super = this->super();
2165 if (super != nullptr) {
2166 super->print_nonstatic_fields(cl);
2167 }
2168 ResourceMark rm;
2169 // In DebugInfo nonstatic fields are sorted by offset.
2170 GrowableArray<FieldInfo> fields_sorted;
2171 for (AllFieldStream fs(this); !fs.done(); fs.next()) {
2172 if (!fs.access_flags().is_static()) {
2173 fields_sorted.push(fs.to_FieldInfo());
2174 }
2175 }
2176 int length = fields_sorted.length();
2177 if (length > 0) {
2178 fields_sorted.sort(compare_fields_by_offset);
2179 fieldDescriptor fd;
2180 for (int i = 0; i < length; i++) {
2181 fd.reinitialize(this, fields_sorted.at(i));
2182 assert(!fd.is_static() && fd.offset() == checked_cast<int>(fields_sorted.at(i).offset()), "only nonstatic fields");
2183 cl->do_field(&fd);
2184 }
2185 }
2186 }
2187
2188 #ifdef ASSERT
2189 static int linear_search(const Array<Method*>* methods,
2190 const Symbol* name,
2191 const Symbol* signature) {
2192 const int len = methods->length();
2193 for (int index = 0; index < len; index++) {
2194 const Method* const m = methods->at(index);
2195 assert(m->is_method(), "must be method");
2196 if (m->signature() == signature && m->name() == name) {
2197 return index;
2198 }
2199 }
2200 return -1;
2201 }
2202 #endif
2203
2204 bool InstanceKlass::_disable_method_binary_search = false;
2205
2206 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
2207 int len = methods->length();
2208 int l = 0;
2209 int h = len - 1;
2210 while (l <= h) {
2211 Method* m = methods->at(l);
2212 if (m->name() == name) {
2213 return l;
2214 }
2215 l++;
2216 }
2217 return -1;
2218 }
2219
2220 inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
2221 if (_disable_method_binary_search) {
2222 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
2223 // At the final stage of dynamic dumping, the methods array may not be sorted
2224 // by ascending addresses of their names, so we can't use binary search anymore.
2225 // However, methods with the same name are still laid out consecutively inside the
2226 // methods array, so let's look for the first one that matches.
2227 return linear_search(methods, name);
2228 }
2229
2230 int len = methods->length();
2231 int l = 0;
2232 int h = len - 1;
2233
2234 // methods are sorted by ascending addresses of their names, so do binary search
2235 while (l <= h) {
2236 int mid = (l + h) >> 1;
2237 Method* m = methods->at(mid);
2238 assert(m->is_method(), "must be method");
2239 int res = m->name()->fast_compare(name);
2240 if (res == 0) {
2241 return mid;
2242 } else if (res < 0) {
2243 l = mid + 1;
2244 } else {
2245 h = mid - 1;
2246 }
2247 }
2248 return -1;
2249 }
2250
2251 // find_method looks up the name/signature in the local methods array
2252 Method* InstanceKlass::find_method(const Symbol* name,
2253 const Symbol* signature) const {
2254 return find_method_impl(name, signature,
2255 OverpassLookupMode::find,
2256 StaticLookupMode::find,
2257 PrivateLookupMode::find);
2258 }
2259
2260 Method* InstanceKlass::find_method_impl(const Symbol* name,
2261 const Symbol* signature,
2262 OverpassLookupMode overpass_mode,
2263 StaticLookupMode static_mode,
2264 PrivateLookupMode private_mode) const {
2265 return InstanceKlass::find_method_impl(methods(),
2266 name,
2267 signature,
2268 overpass_mode,
2269 static_mode,
2270 private_mode);
2271 }
2272
2273 // find_instance_method looks up the name/signature in the local methods array
2274 // and skips over static methods
2275 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
2276 const Symbol* name,
2277 const Symbol* signature,
2278 PrivateLookupMode private_mode) {
2279 Method* const meth = InstanceKlass::find_method_impl(methods,
2280 name,
2281 signature,
2282 OverpassLookupMode::find,
2283 StaticLookupMode::skip,
2284 private_mode);
2285 assert(((meth == nullptr) || !meth->is_static()),
2286 "find_instance_method should have skipped statics");
2287 return meth;
2288 }
2289
2290 // find_instance_method looks up the name/signature in the local methods array
2291 // and skips over static methods
2292 Method* InstanceKlass::find_instance_method(const Symbol* name,
2293 const Symbol* signature,
2294 PrivateLookupMode private_mode) const {
2295 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
2296 }
2297
2298 // Find looks up the name/signature in the local methods array
2299 // and filters on the overpass, static and private flags
2300 // This returns the first one found
2301 // note that the local methods array can have up to one overpass, one static
2302 // and one instance (private or not) with the same name/signature
2303 Method* InstanceKlass::find_local_method(const Symbol* name,
2304 const Symbol* signature,
2305 OverpassLookupMode overpass_mode,
2306 StaticLookupMode static_mode,
2307 PrivateLookupMode private_mode) const {
2308 return InstanceKlass::find_method_impl(methods(),
2309 name,
2310 signature,
2311 overpass_mode,
2312 static_mode,
2313 private_mode);
2314 }
2315
2316 // Find looks up the name/signature in the local methods array
2317 // and filters on the overpass, static and private flags
2318 // This returns the first one found
2319 // note that the local methods array can have up to one overpass, one static
2320 // and one instance (private or not) with the same name/signature
2321 Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
2322 const Symbol* name,
2323 const Symbol* signature,
2324 OverpassLookupMode overpass_mode,
2325 StaticLookupMode static_mode,
2326 PrivateLookupMode private_mode) {
2327 return InstanceKlass::find_method_impl(methods,
2328 name,
2329 signature,
2330 overpass_mode,
2331 static_mode,
2332 private_mode);
2333 }
2334
2335 Method* InstanceKlass::find_method(const Array<Method*>* methods,
2336 const Symbol* name,
2337 const Symbol* signature) {
2338 return InstanceKlass::find_method_impl(methods,
2339 name,
2340 signature,
2341 OverpassLookupMode::find,
2342 StaticLookupMode::find,
2343 PrivateLookupMode::find);
2344 }
2345
2346 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
2347 const Symbol* name,
2348 const Symbol* signature,
2349 OverpassLookupMode overpass_mode,
2350 StaticLookupMode static_mode,
2351 PrivateLookupMode private_mode) {
2352 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
2353 return hit >= 0 ? methods->at(hit): nullptr;
2354 }
2355
2356 // true if method matches signature and conforms to skipping_X conditions.
2357 static bool method_matches(const Method* m,
2358 const Symbol* signature,
2359 bool skipping_overpass,
2360 bool skipping_static,
2361 bool skipping_private) {
2362 return ((m->signature() == signature) &&
2363 (!skipping_overpass || !m->is_overpass()) &&
2364 (!skipping_static || !m->is_static()) &&
2365 (!skipping_private || !m->is_private()));
2366 }
2367
2368 // Used directly for default_methods to find the index into the
2369 // default_vtable_indices, and indirectly by find_method
2370 // find_method_index looks in the local methods array to return the index
2371 // of the matching name/signature. If overpass methods are being ignored,
2372 // the search continues to find a potential non-overpass match. This capability
2373 // is important during method resolution to prefer a static method, for example,
2374 // over an overpass method.
2375 // Any class's _methods array may contain the same name/signature for a
2376 // static method, an overpass method, and a local instance method.
2377 // To correctly catch a given method, the search criteria may need
2378 // to explicitly skip the other two. For local instance methods, it
2379 // is often necessary to skip private methods
2380 int InstanceKlass::find_method_index(const Array<Method*>* methods,
2381 const Symbol* name,
2382 const Symbol* signature,
2383 OverpassLookupMode overpass_mode,
2384 StaticLookupMode static_mode,
2385 PrivateLookupMode private_mode) {
2386 const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
2387 const bool skipping_static = (static_mode == StaticLookupMode::skip);
2388 const bool skipping_private = (private_mode == PrivateLookupMode::skip);
2389 const int hit = quick_search(methods, name);
2390 if (hit != -1) {
2391 const Method* const m = methods->at(hit);
2392
2393 // Do linear search to find matching signature. First, quick check
2394 // for common case, ignoring overpasses if requested.
2395 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2396 return hit;
2397 }
2398
2399 // search downwards through overloaded methods
2400 int i;
2401 for (i = hit - 1; i >= 0; --i) {
2402 const Method* const m = methods->at(i);
2403 assert(m->is_method(), "must be method");
2404 if (m->name() != name) {
2405 break;
2406 }
2407 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2408 return i;
2409 }
2410 }
2411 // search upwards
2412 for (i = hit + 1; i < methods->length(); ++i) {
2413 const Method* const m = methods->at(i);
2414 assert(m->is_method(), "must be method");
2415 if (m->name() != name) {
2416 break;
2417 }
2418 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
2419 return i;
2420 }
2421 }
2422 // not found
2423 #ifdef ASSERT
2424 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
2425 linear_search(methods, name, signature);
2426 assert(-1 == index, "binary search should have found entry %d", index);
2427 #endif
2428 }
2429 return -1;
2430 }
2431
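// Returns the index of the first method with the given name and stores the index one
// past the last such method in *end; methods with the same name are contiguous because
// the array is sorted by name address. Returns -1 if the name is not found.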
2432 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
2433 return find_method_by_name(methods(), name, end);
2434 }
2435
2436 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
2437 const Symbol* name,
2438 int* end_ptr) {
2439 assert(end_ptr != nullptr, "just checking");
2440 int start = quick_search(methods, name);
2441 int end = start + 1;
2442 if (start != -1) {
2443 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
2444 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
2445 *end_ptr = end;
2446 return start;
2447 }
2448 return -1;
2449 }
2450
2451 // uncached_lookup_method searches both the local class methods array and all
2452 // superclasses methods arrays, skipping any overpass methods in superclasses,
2453 // and possibly skipping private methods.
2454 Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
2455 const Symbol* signature,
2456 OverpassLookupMode overpass_mode,
2457 PrivateLookupMode private_mode) const {
2458 OverpassLookupMode overpass_local_mode = overpass_mode;
2459 const InstanceKlass* klass = this;
2460 while (klass != nullptr) {
2461 Method* const method = klass->find_method_impl(name,
2462 signature,
2463 overpass_local_mode,
2464 StaticLookupMode::find,
2465 private_mode);
2466 if (method != nullptr) {
2467 return method;
2468 }
2469 if (name == vmSymbols::object_initializer_name()) {
2470 break; // <init> is never inherited
2471 }
2472 klass = klass->super();
2473 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
2474 }
2475 return nullptr;
2476 }
2477
2478 #ifdef ASSERT
2479 // search through class hierarchy and return true if this class or
2480 // one of the superclasses was redefined
2481 bool InstanceKlass::has_redefined_this_or_super() const {
2482 const InstanceKlass* klass = this;
2483 while (klass != nullptr) {
2484 if (klass->has_been_redefined()) {
2485 return true;
2486 }
2487 klass = klass->super();
2488 }
2489 return false;
2490 }
2491 #endif
2492
2493 // lookup a method in the default methods list then in all transitive interfaces
2494 // Do NOT return private or static methods
2495 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2496 Symbol* signature) const {
2497 Method* m = nullptr;
2498 if (default_methods() != nullptr) {
2499 m = find_method(default_methods(), name, signature);
2500 }
2501 // Look up interfaces
2502 if (m == nullptr) {
2503 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2504 }
2505 return m;
2506 }
2507
2508 // lookup a method in all the interfaces that this class implements
2509 // Do NOT return private or static methods; these are new in JDK 8 and are not externally visible.
2510 // They should only be found in the initial InterfaceMethodRef
2511 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2512 Symbol* signature,
2513 DefaultsLookupMode defaults_mode) const {
2514 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2515 int num_ifs = all_ifs->length();
2516 InstanceKlass *ik = nullptr;
2517 for (int i = 0; i < num_ifs; i++) {
2518 ik = all_ifs->at(i);
2519 Method* m = ik->lookup_method(name, signature);
2520 if (m != nullptr && m->is_public() && !m->is_static() &&
2521 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2522 return m;
2523 }
2524 }
2525 return nullptr;
2526 }
2527
2528 PrintClassClosure::PrintClassClosure(outputStream* st, bool verbose)
2529 :_st(st), _verbose(verbose) {
2530 ResourceMark rm;
2531 _st->print("%-18s ", "KlassAddr");
2532 _st->print("%-4s ", "Size");
2533 _st->print("%-20s ", "State");
2534 _st->print("%-7s ", "Flags");
2535 _st->print("%-5s ", "ClassName");
2536 _st->cr();
2537 }
2538
2539 void PrintClassClosure::do_klass(Klass* k) {
2540 ResourceMark rm;
2541 // klass pointer
2542 _st->print(PTR_FORMAT " ", p2i(k));
2543 // klass size
2544 _st->print("%4d ", k->size());
2545 // initialization state
2546 if (k->is_instance_klass()) {
2547 _st->print("%-20s ",InstanceKlass::cast(k)->init_state_name());
2548 } else {
2549 _st->print("%-20s ","");
2550 }
2551 // misc flags (changes should be kept in sync with the ClassesDCmd::ClassesDCmd help doc)
2552 char buf[10];
2553 int i = 0;
2554 if (k->has_finalizer()) buf[i++] = 'F';
2555 if (k->is_instance_klass()) {
2556 InstanceKlass* ik = InstanceKlass::cast(k);
2557 if (ik->has_final_method()) buf[i++] = 'f';
2558 if (ik->is_rewritten()) buf[i++] = 'W';
2559 if (ik->is_contended()) buf[i++] = 'C';
2560 if (ik->has_been_redefined()) buf[i++] = 'R';
2561 if (ik->in_aot_cache()) buf[i++] = 'S';
2562 }
2563 buf[i++] = '\0';
2564 _st->print("%-7s ", buf);
2565 // klass name
2566 _st->print("%-5s ", k->external_name());
2567 // end
2568 _st->cr();
2569 if (_verbose) {
2570 k->print_on(_st);
2571 }
2572 }
2573
2574 /* jni_id_for is used for jfieldIds only */
2575 JNIid* InstanceKlass::jni_id_for(int offset) {
2576 MutexLocker ml(JfieldIdCreation_lock);
2577 JNIid* probe = jni_ids() == nullptr ? nullptr : jni_ids()->find(offset);
2578 if (probe == nullptr) {
2579 // Allocate new static field identifier
2580 probe = new JNIid(this, offset, jni_ids());
2581 set_jni_ids(probe);
2582 }
2583 return probe;
2584 }
2585
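// The EnclosingMethod attribute data, when present, occupies the trailing slots of the
// _inner_classes array; 'offset' selects which u2 value (class index or method index)
// to return. Returns 0 if the attribute is absent.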
2586 u2 InstanceKlass::enclosing_method_data(int offset) const {
2587 const Array<jushort>* const inner_class_list = inner_classes();
2588 if (inner_class_list == nullptr) {
2589 return 0;
2590 }
2591 const int length = inner_class_list->length();
2592 if (length % inner_class_next_offset == 0) {
2593 return 0;
2594 }
2595 const int index = length - enclosing_method_attribute_size;
2596 assert(offset < enclosing_method_attribute_size, "invalid offset");
2597 return inner_class_list->at(index + offset);
2598 }
2599
2600 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2601 u2 method_index) {
2602 Array<jushort>* inner_class_list = inner_classes();
2603 assert (inner_class_list != nullptr, "_inner_classes list is not set up");
2604 int length = inner_class_list->length();
2605 if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2606 int index = length - enclosing_method_attribute_size;
2607 inner_class_list->at_put(
2608 index + enclosing_method_class_index_offset, class_index);
2609 inner_class_list->at_put(
2610 index + enclosing_method_method_index_offset, method_index);
2611 }
2612 }
2613
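// Creates a jmethodID for the given method (using the current version if the method is
// old but not obsolete) and publishes it in the cache slot for its idnum with a release store.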
2614 jmethodID InstanceKlass::update_jmethod_id(jmethodID* jmeths, Method* method, int idnum) {
2615 if (method->is_old() && !method->is_obsolete()) {
2616 // If the method passed in is old (but not obsolete), use the current version.
2617 method = method_with_idnum((int)idnum);
2618 assert(method != nullptr, "old but not obsolete, so should exist");
2619 }
2620 jmethodID new_id = Method::make_jmethod_id(class_loader_data(), method);
2621 AtomicAccess::release_store(&jmeths[idnum + 1], new_id);
2622 return new_id;
2623 }
2624
2625 // Allocate the jmethodID cache.
2626 static jmethodID* create_jmethod_id_cache(size_t size) {
2627 jmethodID* jmeths = NEW_C_HEAP_ARRAY(jmethodID, size + 1, mtClass);
2628 memset(jmeths, 0, (size + 1) * sizeof(jmethodID));
2629 // cache size is stored in element[0], other elements offset by one
2630 jmeths[0] = (jmethodID)size;
2631 return jmeths;
2632 }
2633
2634 // When reading outside a lock, use this.
2635 jmethodID* InstanceKlass::methods_jmethod_ids_acquire() const {
2636 return AtomicAccess::load_acquire(&_methods_jmethod_ids);
2637 }
2638
2639 void InstanceKlass::release_set_methods_jmethod_ids(jmethodID* jmeths) {
2640 AtomicAccess::release_store(&_methods_jmethod_ids, jmeths);
2641 }
2642
2643 // Lookup or create a jmethodID.
2644 jmethodID InstanceKlass::get_jmethod_id(Method* method) {
2645 int idnum = method->method_idnum();
2646 jmethodID* jmeths = methods_jmethod_ids_acquire();
2647
2648 // We use a double-check locking idiom here because this cache is
2649 // performance sensitive. In the normal system, this cache only
2650 // transitions from null to non-null which is safe because we use
2651 // release_set_methods_jmethod_ids() to advertise the new cache.
2652 // A partially constructed cache should never be seen by a racing
2653 // thread. We also use release_store() to save a new jmethodID
2654 // in the cache so a partially constructed jmethodID should never be
2655 // seen either. Cache reads of existing jmethodIDs proceed without a
2656 // lock, but cache writes of a new jmethodID requires uniqueness and
2657 // creation of the cache itself requires no leaks so a lock is
2658 // acquired in those two cases.
2659 //
2660 // If the RedefineClasses() API has been used, then this cache grows
2661 // in the redefinition safepoint.
2662
2663 if (jmeths == nullptr) {
2664 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2665 jmeths = _methods_jmethod_ids;
2666 // Still null?
2667 if (jmeths == nullptr) {
2668 size_t size = idnum_allocated_count();
2669 assert(size > (size_t)idnum, "should already have space");
2670 jmeths = create_jmethod_id_cache(size);
2671 jmethodID new_id = update_jmethod_id(jmeths, method, idnum);
2672
2673 // publish jmeths
2674 release_set_methods_jmethod_ids(jmeths);
2675 return new_id;
2676 }
2677 }
2678
2679 jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
2680 if (id == nullptr) {
2681 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2682 id = jmeths[idnum + 1];
2683 // Still null?
2684 if (id == nullptr) {
2685 return update_jmethod_id(jmeths, method, idnum);
2686 }
2687 }
2688 return id;
2689 }
2690
2691 void InstanceKlass::update_methods_jmethod_cache() {
2692 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2693 jmethodID* cache = _methods_jmethod_ids;
2694 if (cache != nullptr) {
2695 size_t size = idnum_allocated_count();
2696 size_t old_size = (size_t)cache[0];
2697 if (old_size < size + 1) {
2698 // Allocate a larger one and copy entries to the new one.
2699 // They've already been updated to point to new methods where applicable (i.e., not obsolete).
2700 jmethodID* new_cache = create_jmethod_id_cache(size);
2701
2702 for (int i = 1; i <= (int)old_size; i++) {
2703 new_cache[i] = cache[i];
2704 }
2705 _methods_jmethod_ids = new_cache;
2706 FREE_C_HEAP_ARRAY(jmethodID, cache);
2707 }
2708 }
2709 }
2710
2711 // Make a jmethodID for all methods in this class. This makes getting all method
2712 // ids much, much faster for classes with more than 8
2713 // methods, and has a *substantial* effect on performance with jvmti
2714 // code that loads all jmethodIDs for all classes.
2715 void InstanceKlass::make_methods_jmethod_ids() {
2716 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2717 jmethodID* jmeths = _methods_jmethod_ids;
2718 if (jmeths == nullptr) {
2719 jmeths = create_jmethod_id_cache(idnum_allocated_count());
2720 release_set_methods_jmethod_ids(jmeths);
2721 }
2722
2723 int length = methods()->length();
2724 for (int index = 0; index < length; index++) {
2725 Method* m = methods()->at(index);
2726 int idnum = m->method_idnum();
2727 assert(!m->is_old(), "should not have old methods or I'm confused");
2728 jmethodID id = AtomicAccess::load_acquire(&jmeths[idnum + 1]);
2729 if (!m->is_overpass() && // skip overpasses
2730 id == nullptr) {
2731 id = Method::make_jmethod_id(class_loader_data(), m);
2732 AtomicAccess::release_store(&jmeths[idnum + 1], id);
2733 }
2734 }
2735 }
2736
2737 // Lookup a jmethodID, null if not found. Do no blocking, no allocations, no handles
2738 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2739 int idnum = method->method_idnum();
2740 jmethodID* jmeths = methods_jmethod_ids_acquire();
2741 return (jmeths != nullptr) ? jmeths[idnum + 1] : nullptr;
2742 }
2743
2744 inline DependencyContext InstanceKlass::dependencies() {
2745 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2746 return dep_context;
2747 }
2748
2749 void InstanceKlass::mark_dependent_nmethods(DeoptimizationScope* deopt_scope, KlassDepChange& changes) {
2750 dependencies().mark_dependent_nmethods(deopt_scope, changes);
2751 }
2752
2753 void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2754 assert_lock_strong(CodeCache_lock);
2755 dependencies().add_dependent_nmethod(nm);
2756 }
2757
2758 void InstanceKlass::clean_dependency_context() {
2759 dependencies().clean_unloading_dependents();
2760 }
2761
2762 #ifndef PRODUCT
2763 void InstanceKlass::print_dependent_nmethods(bool verbose) {
2764 dependencies().print_dependent_nmethods(verbose);
2765 }
2766
2767 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2768 return dependencies().is_dependent_nmethod(nm);
2769 }
2770 #endif //PRODUCT
2771
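// Clears weak links held by this klass: drops an interface's implementor if its class
// loader is no longer alive, and cleans stale entries out of the methods' MethodData.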
2772 void InstanceKlass::clean_weak_instanceklass_links() {
2773 clean_implementors_list();
2774 clean_method_data();
2775 }
2776
2777 void InstanceKlass::clean_implementors_list() {
2778 assert(is_loader_alive(), "this klass should be live");
2779 if (is_interface()) {
2780 assert (ClassUnloading, "only called for ClassUnloading");
2781 for (;;) {
2782 // Use load_acquire due to competing with inserts
2783 InstanceKlass* volatile* iklass = adr_implementor();
2784 assert(iklass != nullptr, "Klass must not be null");
2785 InstanceKlass* impl = AtomicAccess::load_acquire(iklass);
2786 if (impl != nullptr && !impl->is_loader_alive()) {
2787 // null this field, might be an unloaded instance klass or null
2788 if (AtomicAccess::cmpxchg(iklass, impl, (InstanceKlass*)nullptr) == impl) {
2789 // Successfully unlinking implementor.
2790 if (log_is_enabled(Trace, class, unload)) {
2791 ResourceMark rm;
2792 log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2793 }
2794 return;
2795 }
2796 } else {
2797 return;
2798 }
2799 }
2800 }
2801 }
2802
2803 void InstanceKlass::clean_method_data() {
2804 for (int m = 0; m < methods()->length(); m++) {
2805 MethodData* mdo = methods()->at(m)->method_data();
2806 if (mdo != nullptr) {
2807 mdo->clean_method_data(/*always_clean*/false);
2808 }
2809 }
2810 }
2811
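// Visits every pointer from this InstanceKlass to other metaspace objects (constants,
// methods, interfaces, itable entries, etc.) so that a MetaspaceClosure (for example,
// during CDS/AOT archiving) can follow or relocate them.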
2812 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2813 Klass::metaspace_pointers_do(it);
2814
2815 if (log_is_enabled(Trace, aot)) {
2816 ResourceMark rm;
2817 log_trace(aot)("Iter(InstanceKlass): %p (%s)", this, external_name());
2818 }
2819
2820 it->push(&_annotations);
2821 it->push((Klass**)&_array_klasses);
2822 if (!is_rewritten()) {
2823 it->push(&_constants, MetaspaceClosure::_writable);
2824 } else {
2825 it->push(&_constants);
2826 }
2827 it->push(&_inner_classes);
2828 #if INCLUDE_JVMTI
2829 it->push(&_previous_versions);
2830 #endif
2831 #if INCLUDE_CDS
2832 // For "old" classes with methods containing the jsr bytecode, the _methods array will
2833 // be rewritten during runtime (see Rewriter::rewrite_jsrs()) but they cannot be safely
2834 // checked here with ByteCodeStream. All methods that can't be verified are made writable.
2835 // The length check on the _methods is necessary because classes which don't have any
2836 // methods share the Universe::_the_empty_method_array which is in the RO region.
2837 if (_methods != nullptr && _methods->length() > 0 && !can_be_verified_at_dumptime()) {
2838 // To handle jsr bytecode, new Method* maybe stored into _methods
2839 it->push(&_methods, MetaspaceClosure::_writable);
2840 } else {
2841 #endif
2842 it->push(&_methods);
2843 #if INCLUDE_CDS
2844 }
2845 #endif
2846 it->push(&_default_methods);
2847 it->push(&_local_interfaces);
2848 it->push(&_transitive_interfaces);
2849 it->push(&_method_ordering);
2850 if (!is_rewritten()) {
2851 it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2852 } else {
2853 it->push(&_default_vtable_indices);
2854 }
2855
2856 it->push(&_fieldinfo_stream);
2857 it->push(&_fieldinfo_search_table);
2858 // _fields_status might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
2859 it->push(&_fields_status, MetaspaceClosure::_writable);
2860
2861 if (itable_length() > 0) {
2862 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2863 int method_table_offset_in_words = ioe->offset()/wordSize;
2864 int itable_offset_in_words = (int)(start_of_itable() - (intptr_t*)this);
2865
2866 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words)
2867 / itableOffsetEntry::size();
2868
2869 for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2870 if (ioe->interface_klass() != nullptr) {
2871 it->push(ioe->interface_klass_addr());
2872 itableMethodEntry* ime = ioe->first_method_entry(this);
2873 int n = klassItable::method_count_for_interface(ioe->interface_klass());
2874 for (int index = 0; index < n; index ++) {
2875 it->push(ime[index].method_addr());
2876 }
2877 }
2878 }
2879 }
2880
2881 it->push(&_nest_host);
2882 it->push(&_nest_members);
2883 it->push(&_permitted_subclasses);
2884 it->push(&_loadable_descriptors);
2885 it->push(&_record_components);
2886 it->push(&_inline_layout_info_array, MetaspaceClosure::_writable);
2887 }
2888
2889 #if INCLUDE_CDS
2890 void InstanceKlass::remove_unshareable_info() {
2891
2892 if (is_linked()) {
2893 assert(can_be_verified_at_dumptime(), "must be");
2894 // Remember this so we can avoid walking the hierarchy at runtime.
2895 set_verified_at_dump_time();
2896 }
2897
2898 _misc_flags.set_has_init_deps_processed(false);
2899
2900 Klass::remove_unshareable_info();
2901
2902 if (SystemDictionaryShared::has_class_failed_verification(this)) {
2903 // Classes are attempted to link during dumping and may fail,
2904 // but these classes are still in the dictionary and class list in CLD.
2905 // If the class has failed verification, there is nothing else to remove.
2906 return;
2907 }
2908
2909 // Reset to the 'allocated' state to prevent any premature access to
2910 // a shared class at runtime while the class is still being loaded and
2911 // restored. A class' init_state is set to 'loaded' at runtime when it's
2912 // being added to the class hierarchy (see InstanceKlass::add_to_hierarchy()).
2913 _init_state = allocated;
2914
2915 { // Otherwise this needs to take out the Compile_lock.
2916 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2917 init_implementor();
2918 }
2919
2920 // Call remove_unshareable_info() on other objects that belong to this class, except
2921 // for constants()->remove_unshareable_info(), which is called in a separate pass in
2922 // ArchiveBuilder::make_klasses_shareable().
2923
2924 for (int i = 0; i < methods()->length(); i++) {
2925 Method* m = methods()->at(i);
2926 m->remove_unshareable_info();
2927 }
2928
2929 // do array classes also.
2930 if (array_klasses() != nullptr) {
2931 array_klasses()->remove_unshareable_info();
2932 }
2933
2934 // These are not allocated from metaspace. They are safe to set to nullptr.
2935 _source_debug_extension = nullptr;
2936 _dep_context = nullptr;
2937 _osr_nmethods_head = nullptr;
2938 #if INCLUDE_JVMTI
2939 _breakpoints = nullptr;
2940 _previous_versions = nullptr;
2941 _cached_class_file = nullptr;
2942 _jvmti_cached_class_field_map = nullptr;
2943 #endif
2944
2945 _init_thread = nullptr;
2946 _methods_jmethod_ids = nullptr;
2947 _jni_ids = nullptr;
2948 _oop_map_cache = nullptr;
2949 if (CDSConfig::is_dumping_method_handles() && HeapShared::is_lambda_proxy_klass(this)) {
2950 // keep _nest_host
2951 } else {
2952 // clear _nest_host to ensure re-load at runtime
2953 _nest_host = nullptr;
2954 }
2955 init_shared_package_entry();
2956 _dep_context_last_cleaned = 0;
2957 DEBUG_ONLY(_shared_class_load_count = 0);
2958
2959 remove_unshareable_flags();
2960
2961 DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
2962 }
2963
2964 void InstanceKlass::remove_unshareable_flags() {
2965 // clear all the flags/stats that shouldn't be in the archived version
2966 assert(!is_scratch_class(), "must be");
2967 assert(!has_been_redefined(), "must be");
2968 #if INCLUDE_JVMTI
2969 set_is_being_redefined(false);
2970 #endif
2971 set_has_resolved_methods(false);
2972 }
2973
2974 void InstanceKlass::remove_java_mirror() {
2975 Klass::remove_java_mirror();
2976
2977 // do array classes also.
2978 if (array_klasses() != nullptr) {
2979 array_klasses()->remove_java_mirror();
2980 }
2981 }
2982
2983 void InstanceKlass::init_shared_package_entry() {
2984 assert(CDSConfig::is_dumping_archive(), "must be");
2985 #if !INCLUDE_CDS_JAVA_HEAP
2986 _package_entry = nullptr;
2987 #else
2988 if (CDSConfig::is_dumping_full_module_graph()) {
2989 if (defined_by_other_loaders()) {
2990 _package_entry = nullptr;
2991 } else {
2992 _package_entry = PackageEntry::get_archived_entry(_package_entry);
2993 }
2994 } else if (CDSConfig::is_dumping_dynamic_archive() &&
2995 CDSConfig::is_using_full_module_graph() &&
2996 AOTMetaspace::in_aot_cache(_package_entry)) {
2997 // _package_entry is an archived package in the base archive. Leave it as is.
2998 } else {
2999 _package_entry = nullptr;
3000 }
3001 ArchivePtrMarker::mark_pointer((address**)&_package_entry);
3002 #endif
3003 }
3004
3005 void InstanceKlass::compute_has_loops_flag_for_methods() {
3006 Array<Method*>* methods = this->methods();
3007 for (int index = 0; index < methods->length(); ++index) {
3008 Method* m = methods->at(index);
3009 if (!m->is_overpass()) { // work around JDK-8305771
3010 m->compute_has_loops_flag();
3011 }
3012 }
3013 }
3014
3015 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
3016 PackageEntry* pkg_entry, TRAPS) {
3017 // InstanceKlass::add_to_hierarchy() sets the init_state to loaded
3018 // before the InstanceKlass is added to the SystemDictionary. Make
3019 // sure the current state is below 'loaded'.
3020 assert(!is_loaded(), "invalid init state");
3021 assert(!shared_loading_failed(), "Must not try to load failed class again");
3022 set_package(loader_data, pkg_entry, CHECK);
3023 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
3024
3025 if (is_inline_klass()) {
3026 InlineKlass::cast(this)->initialize_calling_convention(CHECK);
3027 }
3028
3029 Array<Method*>* methods = this->methods();
3030 int num_methods = methods->length();
3031 for (int index = 0; index < num_methods; ++index) {
3032 methods->at(index)->restore_unshareable_info(CHECK);
3033 }
3034 #if INCLUDE_JVMTI
3035 if (JvmtiExport::has_redefined_a_class()) {
3036 // Reinitialize the vtable because RedefineClasses may have changed some
3037 // entries in this vtable for super classes, so the CDS vtable might
3038 // point to old or obsolete entries. RedefineClasses doesn't fix up
3039 // vtables in the shared system dictionary, only the main one.
3040 // The itable is affected in the same way, so fix it up as well.
3041 // First fix any default methods that point to a super class that may
3042 // have been redefined.
3043 bool trace_name_printed = false;
3044 adjust_default_methods(&trace_name_printed);
3045 if (verified_at_dump_time()) {
3046 // Initialize vtable and itable for classes which can be verified at dump time.
3047 // Unlinked classes such as old classes with major version < 50 cannot be verified
3048 // at dump time.
3049 vtable().initialize_vtable();
3050 itable().initialize_itable();
3051 }
3052 }
3053 #endif // INCLUDE_JVMTI
3054
3055 // restore constant pool resolved references
3056 constants()->restore_unshareable_info(CHECK);
3057
3058 if (array_klasses() != nullptr) {
3059 // To get a consistent list of classes we need MultiArray_lock to ensure
3060 // array classes aren't observed while they are being restored.
3061 RecursiveLocker rl(MultiArray_lock, THREAD);
3062 assert(this == ObjArrayKlass::cast(array_klasses())->bottom_klass(), "sanity");
3063 // Array classes have null protection domain.
3064 // --> see ArrayKlass::complete_create_array_klass()
3065 if (class_loader_data() == nullptr) {
3066 ResourceMark rm(THREAD);
3067 log_debug(cds)(" loader_data %s ", loader_data == nullptr ? "nullptr" : "non null");
3068 log_debug(cds)(" this %s array_klasses %s ", this->name()->as_C_string(), array_klasses()->name()->as_C_string());
3069 }
3070 assert(!array_klasses()->is_refined_objArray_klass(), "must be non-refined objarrayklass");
3071 array_klasses()->restore_unshareable_info(class_loader_data(), Handle(), CHECK);
3072 }
3073
3074 // Initialize @ValueBased class annotation if not already set in the archived klass.
3075 if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation() && !is_value_based()) {
3076 set_is_value_based();
3077 }
3078
3079 DEBUG_ONLY(FieldInfoStream::validate_search_table(_constants, _fieldinfo_stream, _fieldinfo_search_table));
3080 }
3081
3082 bool InstanceKlass::can_be_verified_at_dumptime() const {
3083 if (AOTMetaspace::in_aot_cache(this)) {
3084 // This is a class that was dumped into the base archive, so we know
3085 // it was verified at dump time.
3086 return true;
3087 }
3088
3089 if (CDSConfig::is_preserving_verification_constraints()) {
3090 return true;
3091 }
3092
3093 if (CDSConfig::is_old_class_for_verifier(this)) {
3094 // The old verifier does not save verification constraints, so at run time
3095 // SystemDictionaryShared::check_verification_constraints() will not work for this class.
3096 return false;
3097 }
3098 if (super() != nullptr && !super()->can_be_verified_at_dumptime()) {
3099 return false;
3100 }
3101 Array<InstanceKlass*>* interfaces = local_interfaces();
3102 int len = interfaces->length();
3103 for (int i = 0; i < len; i++) {
3104 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
3105 return false;
3106 }
3107 }
3108 return true;
3109 }
3110
3111 #endif // INCLUDE_CDS
3112
3113 #if INCLUDE_JVMTI
3114 static void clear_all_breakpoints(Method* m) {
3115 m->clear_all_breakpoints();
3116 }
3117 #endif
3118
3119 void InstanceKlass::unload_class(InstanceKlass* ik) {
3120
3121 if (ik->is_scratch_class()) {
3122 assert(ik->dependencies().is_empty(), "dependencies should be empty for scratch classes");
3123 return;
3124 }
3125 assert(ik->is_loaded(), "class should be loaded " PTR_FORMAT, p2i(ik));
3126
3127 // Release dependencies.
3128 ik->dependencies().remove_all_dependents();
3129
3130 // notify the debugger
3131 if (JvmtiExport::should_post_class_unload()) {
3132 JvmtiExport::post_class_unload(ik);
3133 }
3134
3135 // notify ClassLoadingService of class unload
3136 ClassLoadingService::notify_class_unloaded(ik);
3137
3138 SystemDictionaryShared::handle_class_unloading(ik);
3139
3140 if (log_is_enabled(Info, class, unload)) {
3141 ResourceMark rm;
3142 log_info(class, unload)("unloading class %s " PTR_FORMAT, ik->external_name(), p2i(ik));
3143 }
3144
3145 Events::log_class_unloading(Thread::current(), ik);
3146
3147 #if INCLUDE_JFR
3148 assert(ik != nullptr, "invariant");
3149 EventClassUnload event;
3150 event.set_unloadedClass(ik);
3151 event.set_definingClassLoader(ik->class_loader_data());
3152 event.commit();
3153 #endif
3154 }
3155
3156 static void method_release_C_heap_structures(Method* m) {
3157 m->release_C_heap_structures();
3158 }
3159
3160 // Also called by InstanceKlass::deallocate_contents, with false for release_sub_metadata.
3161 void InstanceKlass::release_C_heap_structures(bool release_sub_metadata) {
3162 // Clean up C heap
3163 Klass::release_C_heap_structures();
3164
3165 // Deallocate and call destructors for MDO mutexes
3166 if (release_sub_metadata) {
3167 methods_do(method_release_C_heap_structures);
3168 }
3169
3170 // Deallocate oop map cache
3171 if (_oop_map_cache != nullptr) {
3172 delete _oop_map_cache;
3173 _oop_map_cache = nullptr;
3174 }
3175
3176 // Deallocate JNI identifiers for jfieldIDs
3177 JNIid::deallocate(jni_ids());
3178 set_jni_ids(nullptr);
3179
3180 jmethodID* jmeths = _methods_jmethod_ids;
3181 if (jmeths != nullptr) {
3182 release_set_methods_jmethod_ids(nullptr);
3183 FreeHeap(jmeths);
3184 }
3185
3186 assert(_dep_context == nullptr,
3187 "dependencies should already be cleaned");
3188
3189 #if INCLUDE_JVMTI
3190 // Deallocate breakpoint records
3191 if (breakpoints() != nullptr) {
3192 methods_do(clear_all_breakpoints);
3193 assert(breakpoints() == nullptr, "should have cleared breakpoints");
3194 }
3195
3196 // deallocate the cached class file
3197 if (_cached_class_file != nullptr) {
3198 os::free(_cached_class_file);
3199 _cached_class_file = nullptr;
3200 }
3201 #endif
3202
3203 FREE_C_HEAP_ARRAY(char, _source_debug_extension);
3204
3205 if (release_sub_metadata) {
3206 constants()->release_C_heap_structures();
3207 }
3208 }
3209
3210 // The constant pool is on stack if any of the methods are executing or
3211 // referenced by handles.
3212 bool InstanceKlass::on_stack() const {
3213 return _constants->on_stack();
3214 }
3215
3216 Symbol* InstanceKlass::source_file_name() const { return _constants->source_file_name(); }
3217 u2 InstanceKlass::source_file_name_index() const { return _constants->source_file_name_index(); }
3218 void InstanceKlass::set_source_file_name_index(u2 sourcefile_index) { _constants->set_source_file_name_index(sourcefile_index); }
3219
3220 // minor and major version numbers of class file
3221 u2 InstanceKlass::minor_version() const { return _constants->minor_version(); }
3222 void InstanceKlass::set_minor_version(u2 minor_version) { _constants->set_minor_version(minor_version); }
3223 u2 InstanceKlass::major_version() const { return _constants->major_version(); }
3224 void InstanceKlass::set_major_version(u2 major_version) { _constants->set_major_version(major_version); }
3225
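// Inline (value) types are gated on class file preview features: the class
// file needs a recent enough major version and the preview minor version.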
3226 bool InstanceKlass::supports_inline_types() const {
3227 return major_version() >= Verifier::VALUE_TYPES_MAJOR_VERSION && minor_version() == Verifier::JAVA_PREVIEW_MINOR_VERSION;
3228 }
3229
3230 const InstanceKlass* InstanceKlass::get_klass_version(int version) const {
3231 for (const InstanceKlass* ik = this; ik != nullptr; ik = ik->previous_versions()) {
3232 if (ik->constants()->version() == version) {
3233 return ik;
3234 }
3235 }
3236 return nullptr;
3237 }
3238
3239 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
3240 if (array == nullptr) {
3241 _source_debug_extension = nullptr;
3242 } else {
3243 // Adding one to the attribute length in order to store a null terminator
3244 // character could cause an overflow because the attribute length is
3245 // already encoded as a u4 in the classfile, but in practice it is
3246 // unlikely to happen.
3247 assert((length+1) > length, "Overflow checking");
3248 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
3249 for (int i = 0; i < length; i++) {
3250 sde[i] = array[i];
3251 }
3252 sde[length] = '\0';
3253 _source_debug_extension = sde;
3254 }
3255 }
3256
3257 Symbol* InstanceKlass::generic_signature() const { return _constants->generic_signature(); }
3258 u2 InstanceKlass::generic_signature_index() const { return _constants->generic_signature_index(); }
3259 void InstanceKlass::set_generic_signature_index(u2 sig_index) { _constants->set_generic_signature_index(sig_index); }
3260
3261 const char* InstanceKlass::signature_name() const {
3262 return signature_name_of_carrier(JVM_SIGNATURE_CLASS);
3263 }
3264
3265 const char* InstanceKlass::signature_name_of_carrier(char c) const {
3266 // Get the internal name as a c string
3267 const char* src = (const char*) (name()->as_C_string());
3268 const int src_length = (int)strlen(src);
3269
3270 char* dest = NEW_RESOURCE_ARRAY(char, src_length + 3);
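// +3 leaves room for the leading type indicator, the trailing ';' and the
// terminating null character.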
3271
3272 // Add L or Q as type indicator
3273 int dest_index = 0;
3274 dest[dest_index++] = c;
3275
3276 // Add the actual class name
3277 for (int src_index = 0; src_index < src_length; ) {
3278 dest[dest_index++] = src[src_index++];
3279 }
3280
3281 if (is_hidden()) { // Replace the last '+' with a '.'.
3282 for (int index = (int)src_length; index > 0; index--) {
3283 if (dest[index] == '+') {
3284 dest[index] = JVM_SIGNATURE_DOT;
3285 break;
3286 }
3287 }
3288 }
3289
3290 // Add the semicolon and the null
3291 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
3292 dest[dest_index] = '\0';
3293 return dest;
3294 }
3295
3296 ModuleEntry* InstanceKlass::module() const {
3297 if (is_hidden() &&
3298 in_unnamed_package() &&
3299 class_loader_data()->has_class_mirror_holder()) {
3300 // For a non-strong hidden class defined to an unnamed package,
3301 // its (class held) CLD will not have an unnamed module created for it.
3302 // Two choices to find the correct ModuleEntry:
3303 // 1. If hidden class is within a nest, use nest host's module
3304 // 2. Find the unnamed module off from the class loader
3305 // For now option #2 is used since a nest host is not set until
3306 // after the instance class is created in jvm_lookup_define_class().
3307 if (class_loader_data()->is_boot_class_loader_data()) {
3308 return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
3309 } else {
3310 oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
3311 assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
3312 return java_lang_Module::module_entry(module);
3313 }
3314 }
3315
3316 // Class is in a named package
3317 if (!in_unnamed_package()) {
3318 return _package_entry->module();
3319 }
3320
3321 // Class is in an unnamed package, return its loader's unnamed module
3322 return class_loader_data()->unnamed_module();
3323 }
3324
3325 bool InstanceKlass::in_javabase_module() const {
3326 return module()->name() == vmSymbols::java_base();
3327 }
3328
3329 void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {
3330
3331 // Ensure "java/" packages are only loaded by the boot or platform built-in loaders.
3332 // This is not needed for shared classes since CDS does not archive prohibited classes.
3333 if (!in_aot_cache()) {
3334 check_prohibited_package(name(), loader_data, CHECK);
3335 }
3336
3337 if (in_aot_cache() && _package_entry != nullptr) {
3338 if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) {
3339 // we can use the saved package
3340 assert(AOTMetaspace::in_aot_cache(_package_entry), "must be");
3341 return;
3342 } else {
3343 _package_entry = nullptr;
3344 }
3345 }
3346
3347 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
3348 // it returns, so we need to decrement it when the current function exits.
3349 TempNewSymbol from_class_name =
3350 (pkg_entry != nullptr) ? nullptr : ClassLoader::package_from_class_name(name());
3351
3352 Symbol* pkg_name;
3353 if (pkg_entry != nullptr) {
3354 pkg_name = pkg_entry->name();
3355 } else {
3356 pkg_name = from_class_name;
3357 }
3358
3359 if (pkg_name != nullptr && loader_data != nullptr) {
3360
3361 // Find in class loader's package entry table.
3362 _package_entry = pkg_entry != nullptr ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
3363
3364 // If the package name is not found in the loader's package
3365 // entry table, it is an indication that the package has not
3366 // been defined. Consider it defined within the unnamed module.
3367 if (_package_entry == nullptr) {
3368
3369 if (!ModuleEntryTable::javabase_defined()) {
3370 // Before java.base is defined during bootstrapping, define all packages in
3371 // the java.base module. If a non-java.base package is erroneously placed
3372 // in the java.base module, it will be caught later, when java.base
3373 // is defined, by the ModuleEntryTable::verify_javabase_packages() check.
3374 assert(ModuleEntryTable::javabase_moduleEntry() != nullptr, JAVA_BASE_NAME " module is null");
3375 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, ModuleEntryTable::javabase_moduleEntry());
3376 } else {
3377 assert(loader_data->unnamed_module() != nullptr, "unnamed module is null");
3378 _package_entry = loader_data->packages()->create_entry_if_absent(pkg_name, loader_data->unnamed_module());
3379 }
3380
3381 // A package should have been successfully created
3382 DEBUG_ONLY(ResourceMark rm(THREAD));
3383 assert(_package_entry != nullptr, "Package entry for class %s not found, loader %s",
3384 name()->as_C_string(), loader_data->loader_name_and_id());
3385 }
3386
3387 if (log_is_enabled(Debug, module)) {
3388 ResourceMark rm(THREAD);
3389 ModuleEntry* m = _package_entry->module();
3390 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
3391 external_name(),
3392 pkg_name->as_C_string(),
3393 loader_data->loader_name_and_id(),
3394 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
3395 }
3396 } else {
3397 ResourceMark rm(THREAD);
3398 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
3399 external_name(),
3400 (loader_data != nullptr) ? loader_data->loader_name_and_id() : "null",
3401 UNNAMED_MODULE);
3402 }
3403 }
3404
3405 // Function set_classpath_index ensures that for a non-null _package_entry
3406 // of the InstanceKlass, the entry is in the boot loader's package entry table.
3407 // It then sets the classpath_index in the package entry record.
3408 //
3409 // The classpath_index field is used to find the entry on the boot loader class
3410 // path for packages with classes loaded by the boot loader from -Xbootclasspath/a
3411 // in an unnamed module. It is also used to indicate (for all packages whose
3412 // classes are loaded by the boot loader) that at least one of the package's
3413 // classes has been loaded.
3414 void InstanceKlass::set_classpath_index(s2 path_index) {
3415 if (_package_entry != nullptr) {
3416 DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
3417 assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
3418 assert(path_index != -1, "Unexpected classpath_index");
3419 _package_entry->set_classpath_index(path_index);
3420 }
3421 }
3422
3423 // different versions of is_same_class_package
3424
3425 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
3426 oop classloader1 = this->class_loader();
3427 PackageEntry* classpkg1 = this->package();
3428 if (class2->is_objArray_klass()) {
3429 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
3430 }
3431
3432 oop classloader2;
3433 PackageEntry* classpkg2;
3434 if (class2->is_instance_klass()) {
3435 classloader2 = class2->class_loader();
3436 classpkg2 = class2->package();
3437 } else {
3438 assert(class2->is_typeArray_klass(), "should be type array");
3439 classloader2 = nullptr;
3440 classpkg2 = nullptr;
3441 }
3442
3443 // Same package is determined by comparing class loader
3444 // and package entries. Both must be the same. This rule
3445 // applies even to classes that are defined in the unnamed
3446 // package; they still must have the same class loader.
3447 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
3448 return true;
3449 }
3450
3451 return false;
3452 }
3453
3454 // Return true if this class and other_class are in the same package. Class loader
3455 // and class name information is enough to determine a class's package.
3456 bool InstanceKlass::is_same_class_package(oop other_class_loader,
3457 const Symbol* other_class_name) const {
3458 if (class_loader() != other_class_loader) {
3459 return false;
3460 }
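// The same name resolved by the same loader denotes this class itself,
// which is trivially in its own package.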
3461 if (name()->fast_compare(other_class_name) == 0) {
3462 return true;
3463 }
3464
3465 {
3466 ResourceMark rm;
3467
3468 bool bad_class_name = false;
3469 TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
3470 if (bad_class_name) {
3471 return false;
3472 }
3473 // Check that package_from_class_name() returns null, not "", if there is no package.
3474 assert(other_pkg == nullptr || other_pkg->utf8_length() > 0, "package name is empty string");
3475
3476 const Symbol* const this_package_name =
3477 this->package() != nullptr ? this->package()->name() : nullptr;
3478
3479 if (this_package_name == nullptr || other_pkg == nullptr) {
3480 // One of the two doesn't have a package. Only return true if the other
3481 // one also doesn't have a package.
3482 return this_package_name == other_pkg;
3483 }
3484
3485 // Check if package is identical
3486 return this_package_name->fast_compare(other_pkg) == 0;
3487 }
3488 }
3489
3490 static bool is_prohibited_package_slow(Symbol* class_name) {
3491 // Caller has ResourceMark
3492 int length;
3493 jchar* unicode = class_name->as_unicode(length);
3494 return (length >= 5 &&
3495 unicode[0] == 'j' &&
3496 unicode[1] == 'a' &&
3497 unicode[2] == 'v' &&
3498 unicode[3] == 'a' &&
3499 unicode[4] == '/');
3500 }
3501
3502 // Only boot and platform class loaders can define classes in "java/" packages.
3503 void InstanceKlass::check_prohibited_package(Symbol* class_name,
3504 ClassLoaderData* loader_data,
3505 TRAPS) {
3506 if (!loader_data->is_boot_class_loader_data() &&
3507 !loader_data->is_platform_class_loader_data() &&
3508 class_name != nullptr && class_name->utf8_length() >= 5) {
3509 ResourceMark rm(THREAD);
3510 bool prohibited;
3511 const u1* base = class_name->base();
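// If any of the first five bytes has the high bit set, the name starts with
// non-ASCII (multi-byte UTF-8) characters; compare against "java/" using the
// unicode form. Otherwise a plain C-string comparison is sufficient.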
3512 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
3513 prohibited = is_prohibited_package_slow(class_name);
3514 } else {
3515 char* name = class_name->as_C_string();
3516 prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
3517 }
3518 if (prohibited) {
3519 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
3520 assert(pkg_name != nullptr, "Error in parsing package name starting with 'java/'");
3521 char* name = pkg_name->as_C_string();
3522 const char* class_loader_name = loader_data->loader_name_and_id();
3523 StringUtils::replace_no_expand(name, "/", ".");
3524 const char* msg_text1 = "Class loader (instance of): ";
3525 const char* msg_text2 = " tried to load prohibited package name: ";
3526 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
3527 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
3528 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
3529 THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
3530 }
3531 }
3532 return;
3533 }
3534
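// Scan this class's InnerClasses attribute for the entry that describes this
// class itself. If found, return true and set *ooff and *noff to the constant
// pool indices of the outer class and the inner name, respectively.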
3535 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
3536 constantPoolHandle i_cp(THREAD, constants());
3537 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
3538 int ioff = iter.inner_class_info_index();
3539 if (ioff != 0) {
3540 // Check to see if the name matches the class we're looking for
3541 // before attempting to find the class.
3542 if (i_cp->klass_name_at_matches(this, ioff)) {
3543 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
3544 if (this == inner_klass) {
3545 *ooff = iter.outer_class_info_index();
3546 *noff = iter.inner_name_index();
3547 return true;
3548 }
3549 }
3550 }
3551 }
3552 return false;
3553 }
3554
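// Check that 'type' can be annotated with @NullRestricted: it must be a
// concrete (non-abstract) value class; otherwise an IncompatibleClassChangeError
// is thrown.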
3555 void InstanceKlass::check_can_be_annotated_with_NullRestricted(InstanceKlass* type, Symbol* container_klass_name, TRAPS) {
3556 assert(type->is_instance_klass(), "Sanity check");
3557 if (type->is_identity_class()) {
3558 ResourceMark rm(THREAD);
3559 THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
3560 err_msg("Class %s expects class %s to be a value class, but it is an identity class",
3561 container_klass_name->as_C_string(),
3562 type->external_name()));
3563 }
3564
3565 if (type->is_abstract()) {
3566 ResourceMark rm(THREAD);
3567 THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
3568 err_msg("Class %s expects class %s to be concrete value type, but it is an abstract class",
3569 container_klass_name->as_C_string(),
3570 type->external_name()));
3571 }
3572 }
3573
3574 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
3575 InstanceKlass* outer_klass = nullptr;
3576 *inner_is_member = false;
3577 int ooff = 0, noff = 0;
3578 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
3579 if (has_inner_classes_attr) {
3580 constantPoolHandle i_cp(THREAD, constants());
3581 if (ooff != 0) {
3582 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
3583 if (!ok->is_instance_klass()) {
3584 // If the outer class is not an instance klass then it cannot have
3585 // declared any inner classes.
3586 ResourceMark rm(THREAD);
3587 // Names are all known to be < 64k so we know this formatted message is not excessively large.
3588 Exceptions::fthrow(
3589 THREAD_AND_LOCATION,
3590 vmSymbols::java_lang_IncompatibleClassChangeError(),
3591 "%s and %s disagree on InnerClasses attribute",
3592 ok->external_name(),
3593 external_name());
3594 return nullptr;
3595 }
3596 outer_klass = InstanceKlass::cast(ok);
3597 *inner_is_member = true;
3598 }
3599 if (nullptr == outer_klass) {
3600 // It may be a local class; try for that.
3601 int encl_method_class_idx = enclosing_method_class_index();
3602 if (encl_method_class_idx != 0) {
3603 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
3604 outer_klass = InstanceKlass::cast(ok);
3605 *inner_is_member = false;
3606 }
3607 }
3608 }
3609
3610 // If no inner class attribute found for this class.
3611 if (nullptr == outer_klass) return nullptr;
3612
3613 // Throws an exception if outer klass has not declared k as an inner klass
3614 // We need evidence that each klass knows about the other, or else
3615 // the system could allow a spoof of an inner class to gain access rights.
3616 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
3617 return outer_klass;
3618 }
3619
3620 u2 InstanceKlass::compute_modifier_flags() const {
3621 u2 access = access_flags().as_unsigned_short();
3622
3623 // But check if it happens to be a member class.
3624 InnerClassesIterator iter(this);
3625 for (; !iter.done(); iter.next()) {
3626 int ioff = iter.inner_class_info_index();
3627 // Inner class attribute can be zero, skip it.
3628 // Strange but true: JVM spec. allows null inner class refs.
3629 if (ioff == 0) continue;
3630
3631 // only look at classes that are already loaded
3632 // since we are looking for our own flags.
3633 Symbol* inner_name = constants()->klass_name_at(ioff);
3634 if (name() == inner_name) {
3635 // This is really a member class.
3636 access = iter.inner_access_flags();
3637 break;
3638 }
3639 }
3640 return access;
3641 }
3642
3643 jint InstanceKlass::jvmti_class_status() const {
3644 jint result = 0;
3645
3646 if (is_linked()) {
3647 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3648 }
3649
3650 if (is_initialized()) {
3651 assert(is_linked(), "Class status is not consistent");
3652 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3653 }
3654 if (is_in_error_state()) {
3655 result |= JVMTI_CLASS_STATUS_ERROR;
3656 }
3657 return result;
3658 }
3659
3660 Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
3661 bool implements_interface; // initialized by method_at_itable_or_null
3662 Method* m = method_at_itable_or_null(holder, index,
3663 implements_interface); // out parameter
3664 if (m != nullptr) {
3665 assert(implements_interface, "sanity");
3666 return m;
3667 } else if (implements_interface) {
3668 // Throw AbstractMethodError since corresponding itable slot is empty.
3669 THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
3670 } else {
3671 // If the interface isn't implemented by the receiver class,
3672 // the VM should throw IncompatibleClassChangeError.
3673 ResourceMark rm(THREAD);
3674 stringStream ss;
3675 bool same_module = (module() == holder->module());
3676 ss.print("Receiver class %s does not implement "
3677 "the interface %s defining the method to be called "
3678 "(%s%s%s)",
3679 external_name(), holder->external_name(),
3680 (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
3681 (same_module) ? "" : "; ",
3682 (same_module) ? "" : holder->class_in_module_of_loader());
3683 THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
3684 }
3685 }
3686
3687 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3688 klassItable itable(this);
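// Linear search of the itable's offset entries for the requested interface.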
3689 for (int i = 0; i < itable.size_offset_table(); i++) {
3690 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3691 if (offset_entry->interface_klass() == holder) {
3692 implements_interface = true;
3693 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3694 Method* m = ime[index].method();
3695 return m;
3696 }
3697 }
3698 implements_interface = false;
3699 return nullptr; // offset entry not found
3700 }
3701
3702 int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
3703 assert(is_linked(), "required");
3704 assert(intf_method->method_holder()->is_interface(), "not an interface method");
3705 assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");
3706
3707 int vtable_index = Method::invalid_vtable_index;
3708 Symbol* name = intf_method->name();
3709 Symbol* signature = intf_method->signature();
3710
3711 // First check in default method array
3712 if (!intf_method->is_abstract() && default_methods() != nullptr) {
3713 int index = find_method_index(default_methods(),
3714 name, signature,
3715 Klass::OverpassLookupMode::find,
3716 Klass::StaticLookupMode::find,
3717 Klass::PrivateLookupMode::find);
3718 if (index >= 0) {
3719 vtable_index = default_vtable_indices()->at(index);
3720 }
3721 }
3722 if (vtable_index == Method::invalid_vtable_index) {
3723 // get vtable_index for miranda methods
3724 klassVtable vt = vtable();
3725 vtable_index = vt.index_of_miranda(name, signature);
3726 }
3727 return vtable_index;
3728 }
3729
3730 #if INCLUDE_JVMTI
3731 // Update default_methods for RedefineClasses for methods that are
3732 // not yet in the vtable due to concurrent subclass definition and superinterface
3733 // redefinition.
3734 // Note: those in the vtable should have been updated via adjust_method_entries.
3735 void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
3736 // search the default_methods for uses of either obsolete or EMCP methods
3737 if (default_methods() != nullptr) {
3738 for (int index = 0; index < default_methods()->length(); index ++) {
3739 Method* old_method = default_methods()->at(index);
3740 if (old_method == nullptr || !old_method->is_old()) {
3741 continue; // skip uninteresting entries
3742 }
3743 assert(!old_method->is_deleted(), "default methods may not be deleted");
3744 Method* new_method = old_method->get_new_method();
3745 default_methods()->at_put(index, new_method);
3746
3747 if (log_is_enabled(Info, redefine, class, update)) {
3748 ResourceMark rm;
3749 if (!(*trace_name_printed)) {
3750 log_info(redefine, class, update)
3751 ("adjust: klassname=%s default methods from name=%s",
3752 external_name(), old_method->method_holder()->external_name());
3753 *trace_name_printed = true;
3754 }
3755 log_debug(redefine, class, update, vtables)
3756 ("default method update: %s(%s) ",
3757 new_method->name()->as_C_string(), new_method->signature()->as_C_string());
3758 }
3759 }
3760 }
3761 }
3762 #endif // INCLUDE_JVMTI
3763
3764 // On-stack replacement stuff
3765 void InstanceKlass::add_osr_nmethod(nmethod* n) {
3766 assert_lock_strong(NMethodState_lock);
3767 #ifndef PRODUCT
3768 nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3769 assert(prev == nullptr || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
3770 "redundant OSR recompilation detected. memory leak in CodeCache!");
3771 #endif
3772 // only one compilation can be active
3773 assert(n->is_osr_method(), "wrong kind of nmethod");
3774 n->set_osr_link(osr_nmethods_head());
3775 set_osr_nmethods_head(n);
3776 // Raise the highest osr level if necessary
3777 n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));
3778
3779 // Get rid of the osr methods for the same bci that have lower levels.
3780 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3781 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3782 if (inv != nullptr && inv->is_in_use()) {
3783 inv->make_not_entrant(nmethod::InvalidationReason::OSR_INVALIDATION_OF_LOWER_LEVEL);
3784 }
3785 }
3786 }
3787
3788 // Remove osr nmethod from the list. Return true if found and removed.
3789 bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3790 // This is a short non-blocking critical region, so the no safepoint check is ok.
3791 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3792 assert(n->is_osr_method(), "wrong kind of nmethod");
3793 nmethod* last = nullptr;
3794 nmethod* cur = osr_nmethods_head();
3795 int max_level = CompLevel_none; // Find the max comp level excluding n
3796 Method* m = n->method();
3797 // Search for match
3798 bool found = false;
3799 while(cur != nullptr && cur != n) {
3800 if (m == cur->method()) {
3801 // Find max level before n
3802 max_level = MAX2(max_level, cur->comp_level());
3803 }
3804 last = cur;
3805 cur = cur->osr_link();
3806 }
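// Here cur is either n (found in the list) or nullptr (not found).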
3807 nmethod* next = nullptr;
3808 if (cur == n) {
3809 found = true;
3810 next = cur->osr_link();
3811 if (last == nullptr) {
3812 // Remove first element
3813 set_osr_nmethods_head(next);
3814 } else {
3815 last->set_osr_link(next);
3816 }
3817 }
3818 n->set_osr_link(nullptr);
3819 cur = next;
3820 while (cur != nullptr) {
3821 // Find max level after n
3822 if (m == cur->method()) {
3823 max_level = MAX2(max_level, cur->comp_level());
3824 }
3825 cur = cur->osr_link();
3826 }
3827 m->set_highest_osr_comp_level(max_level);
3828 return found;
3829 }
3830
3831 int InstanceKlass::mark_osr_nmethods(DeoptimizationScope* deopt_scope, const Method* m) {
3832 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3833 nmethod* osr = osr_nmethods_head();
3834 int found = 0;
3835 while (osr != nullptr) {
3836 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3837 if (osr->method() == m) {
3838 deopt_scope->mark(osr);
3839 found++;
3840 }
3841 osr = osr->osr_link();
3842 }
3843 return found;
3844 }
3845
3846 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3847 ConditionalMutexLocker ml(NMethodState_lock, !NMethodState_lock->owned_by_self(), Mutex::_no_safepoint_check_flag);
3848 nmethod* osr = osr_nmethods_head();
3849 nmethod* best = nullptr;
3850 while (osr != nullptr) {
3851 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3852 // There can be a time when a C1 OSR method exists but we are waiting
3853 // for a C2 version. When C2 completes its OSR nmethod we will trash
3854 // the C1 version and only be able to find the C2 version. However,
3855 // while we overflow in the C1 code at back branches we don't want to
3856 // try to switch to the same code that we are already running.
3857
3858 if (osr->method() == m &&
3859 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3860 if (match_level) {
3861 if (osr->comp_level() == comp_level) {
3862 // Found a match - return it.
3863 return osr;
3864 }
3865 } else {
3866 if (best == nullptr || (osr->comp_level() > best->comp_level())) {
3867 if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
3868 // Found the best possible - return it.
3869 return osr;
3870 }
3871 best = osr;
3872 }
3873 }
3874 }
3875 osr = osr->osr_link();
3876 }
3877
3878 assert(match_level == false || best == nullptr, "shouldn't pick up anything if match_level is set");
3879 if (best != nullptr && best->comp_level() >= comp_level) {
3880 return best;
3881 }
3882 return nullptr;
3883 }
3884
3885 // -----------------------------------------------------------------------------------------------------
3886 // Printing
3887
3888 #define BULLET " - "
3889
3890 static const char* state_names[] = {
3891 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3892 };
3893
3894 static void print_vtable(address self, intptr_t* start, int len, outputStream* st) {
3895 ResourceMark rm;
3896 int* forward_refs = NEW_RESOURCE_ARRAY(int, len);
3897 for (int i = 0; i < len; i++) forward_refs[i] = 0;
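// A small positive entry is interpreted as a byte offset from 'self' into the
// same table; remember which slot referenced it in forward_refs so the target
// slot can be annotated with a back reference when it is printed.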
3898 for (int i = 0; i < len; i++) {
3899 intptr_t e = start[i];
3900 st->print("%d : " INTPTR_FORMAT, i, e);
3901 if (forward_refs[i] != 0) {
3902 int from = forward_refs[i];
3903 int off = (int) start[from];
3904 st->print(" (offset %d <= [%d])", off, from);
3905 }
3906 if (MetaspaceObj::is_valid((Metadata*)e)) {
3907 st->print(" ");
3908 ((Metadata*)e)->print_value_on(st);
3909 } else if (self != nullptr && e > 0 && e < 0x10000) {
3910 address location = self + e;
3911 int index = (int)((intptr_t*)location - start);
3912 st->print(" (offset %d => [%d])", (int)e, index);
3913 if (index >= 0 && index < len)
3914 forward_refs[index] = i;
3915 }
3916 st->cr();
3917 }
3918 }
3919
3920 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3921 return print_vtable(nullptr, reinterpret_cast<intptr_t*>(start), len, st);
3922 }
3923
3924 template<typename T>
3925 static void print_array_on(outputStream* st, Array<T>* array) {
3926 if (array == nullptr) { st->print_cr("nullptr"); return; }
3927 array->print_value_on(st); st->cr();
3928 if (Verbose || WizardMode) {
3929 for (int i = 0; i < array->length(); i++) {
3930 st->print("%d : ", i); array->at(i)->print_value_on(st); st->cr();
3931 }
3932 }
3933 }
3934
3935 static void print_array_on(outputStream* st, Array<int>* array) {
3936 if (array == nullptr) { st->print_cr("nullptr"); return; }
3937 array->print_value_on(st); st->cr();
3938 if (Verbose || WizardMode) {
3939 for (int i = 0; i < array->length(); i++) {
3940 st->print("%d : %d", i, array->at(i)); st->cr();
3941 }
3942 }
3943 }
3944
3945 const char* InstanceKlass::init_state_name() const {
3946 return state_names[init_state()];
3947 }
3948
3949 void InstanceKlass::print_on(outputStream* st) const {
3950 assert(is_klass(), "must be klass");
3951 Klass::print_on(st);
3952
3953 st->print(BULLET"instance size: %d", size_helper()); st->cr();
3954 st->print(BULLET"klass size: %d", size()); st->cr();
3955 st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
3956 st->print(BULLET"flags: "); _misc_flags.print_on(st); st->cr();
3957 st->print(BULLET"state: "); st->print_cr("%s", init_state_name());
3958 st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
3959 st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3960 st->print(BULLET"sub: ");
3961 Klass* sub = subklass();
3962 int n;
3963 for (n = 0; sub != nullptr; n++, sub = sub->next_sibling()) {
3964 if (n < MaxSubklassPrintSize) {
3965 sub->print_value_on(st);
3966 st->print(" ");
3967 }
3968 }
3969 if (n >= MaxSubklassPrintSize) st->print("(%zd more klasses...)", n - MaxSubklassPrintSize);
3970 st->cr();
3971
3972 if (is_interface()) {
3973 st->print_cr(BULLET"nof implementors: %d", nof_implementors());
3974 if (nof_implementors() == 1) {
3975 st->print_cr(BULLET"implementor: ");
3976 st->print(" ");
3977 implementor()->print_value_on(st);
3978 st->cr();
3979 }
3980 }
3981
3982 st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3983 st->print(BULLET"methods: "); print_array_on(st, methods());
3984 st->print(BULLET"method ordering: "); print_array_on(st, method_ordering());
3985 if (default_methods() != nullptr) {
3986 st->print(BULLET"default_methods: "); print_array_on(st, default_methods());
3987 }
3988 print_on_maybe_null(st, BULLET"default vtable indices: ", default_vtable_indices());
3989 st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
3990 st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3991
3992 st->print(BULLET"secondary supers: "); secondary_supers()->print_value_on(st); st->cr();
3993
3994 st->print(BULLET"hash_slot: %d", hash_slot()); st->cr();
3995 st->print(BULLET"secondary bitmap: " UINTX_FORMAT_X_0, _secondary_supers_bitmap); st->cr();
3996
3997 if (secondary_supers() != nullptr) {
3998 if (Verbose) {
3999 bool is_hashed = (_secondary_supers_bitmap != SECONDARY_SUPERS_BITMAP_FULL);
4000 st->print_cr(BULLET"---- secondary supers (%d words):", _secondary_supers->length());
4001 for (int i = 0; i < _secondary_supers->length(); i++) {
4002 ResourceMark rm; // for external_name()
4003 Klass* secondary_super = _secondary_supers->at(i);
4004 st->print(BULLET"%2d:", i);
4005 if (is_hashed) {
4006 int home_slot = compute_home_slot(secondary_super, _secondary_supers_bitmap);
4007 int distance = (i - home_slot) & SECONDARY_SUPERS_TABLE_MASK;
4008 st->print(" dist:%02d:", distance);
4009 }
4010 st->print_cr(" %p %s", secondary_super, secondary_super->external_name());
4011 }
4012 }
4013 }
4014 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
4015
4016 print_on_maybe_null(st, BULLET"class loader data: ", class_loader_data());
4017 print_on_maybe_null(st, BULLET"source file: ", source_file_name());
4018 if (source_debug_extension() != nullptr) {
4019 st->print(BULLET"source debug extension: ");
4020 st->print("%s", source_debug_extension());
4021 st->cr();
4022 }
4023 print_on_maybe_null(st, BULLET"class annotations: ", class_annotations());
4024 print_on_maybe_null(st, BULLET"class type annotations: ", class_type_annotations());
4025 print_on_maybe_null(st, BULLET"field annotations: ", fields_annotations());
4026 print_on_maybe_null(st, BULLET"field type annotations: ", fields_type_annotations());
4027 {
4028 bool have_pv = false;
4029 // previous versions are linked together through the InstanceKlass
4030 for (InstanceKlass* pv_node = previous_versions();
4031 pv_node != nullptr;
4032 pv_node = pv_node->previous_versions()) {
4033 if (!have_pv)
4034 st->print(BULLET"previous version: ");
4035 have_pv = true;
4036 pv_node->constants()->print_value_on(st);
4037 }
4038 if (have_pv) st->cr();
4039 }
4040
4041 print_on_maybe_null(st, BULLET"generic signature: ", generic_signature());
4042 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
4043 st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
4044 print_on_maybe_null(st, BULLET"record components: ", record_components());
4045 st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
4046 st->print(BULLET"loadable descriptors: "); loadable_descriptors()->print_value_on(st); st->cr();
4047 if (java_mirror() != nullptr) {
4048 st->print(BULLET"java mirror: ");
4049 java_mirror()->print_value_on(st);
4050 st->cr();
4051 } else {
4052 st->print_cr(BULLET"java mirror: null");
4053 }
4054 st->print(BULLET"vtable length %d (start addr: " PTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
4055 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
4056 st->print(BULLET"itable length %d (start addr: " PTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
4057 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(nullptr, start_of_itable(), itable_length(), st);
4058 st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
4059
4060 FieldPrinter print_static_field(st);
4061 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
4062 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
4063 FieldPrinter print_nonstatic_field(st);
4064 InstanceKlass* ik = const_cast<InstanceKlass*>(this);
4065 ik->print_nonstatic_fields(&print_nonstatic_field);
4066
4067 st->print(BULLET"non-static oop maps (%d entries): ", nonstatic_oop_map_count());
4068 OopMapBlock* map = start_of_nonstatic_oop_maps();
4069 OopMapBlock* end_map = map + nonstatic_oop_map_count();
4070 while (map < end_map) {
4071 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
4072 map++;
4073 }
4074 st->cr();
4075
4076 if (fieldinfo_search_table() != nullptr) {
4077 st->print_cr(BULLET"---- field info search table:");
4078 FieldInfoStream::print_search_table(st, _constants, _fieldinfo_stream, _fieldinfo_search_table);
4079 }
4080 }
4081
4082 void InstanceKlass::print_value_on(outputStream* st) const {
4083 assert(is_klass(), "must be klass");
4084 if (Verbose || WizardMode) access_flags().print_on(st);
4085 name()->print_value_on(st);
4086 }
4087
4088 void FieldPrinter::do_field(fieldDescriptor* fd) {
4089 for (int i = 0; i < _indent; i++) _st->print(" ");
4090 _st->print(BULLET);
4091 if (_obj == nullptr) {
4092 fd->print_on(_st, _base_offset);
4093 _st->cr();
4094 } else {
4095 fd->print_on_for(_st, _obj, _indent, _base_offset);
4096 if (!fd->field_flags().is_flat()) _st->cr();
4097 }
4098 }
4099
4100
4101 void InstanceKlass::oop_print_on(oop obj, outputStream* st, int indent, int base_offset) {
4102 Klass::oop_print_on(obj, st);
4103
4104 if (this == vmClasses::String_klass()) {
4105 typeArrayOop value = java_lang_String::value(obj);
4106 juint length = java_lang_String::length(obj);
4107 if (value != nullptr &&
4108 value->is_typeArray() &&
4109 length <= (juint) value->length()) {
4110 st->print(BULLET"string: ");
4111 java_lang_String::print(obj, st);
4112 st->cr();
4113 }
4114 }
4115
4116 st->print_cr(BULLET"---- fields (total size %zu words):", oop_size(obj));
4117 FieldPrinter print_field(st, obj, indent, base_offset);
4118 print_nonstatic_fields(&print_field);
4119
4120 if (this == vmClasses::Class_klass()) {
4121 st->print(BULLET"signature: ");
4122 java_lang_Class::print_signature(obj, st);
4123 st->cr();
4124 Klass* real_klass = java_lang_Class::as_Klass(obj);
4125 if (real_klass != nullptr && real_klass->is_instance_klass()) {
4126 st->print_cr(BULLET"---- static fields (%d):", java_lang_Class::static_oop_field_count(obj));
4127 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
4128 }
4129 } else if (this == vmClasses::MethodType_klass()) {
4130 st->print(BULLET"signature: ");
4131 java_lang_invoke_MethodType::print_signature(obj, st);
4132 st->cr();
4133 }
4134 }
4135
4136 #ifndef PRODUCT
4137
4138 bool InstanceKlass::verify_itable_index(int i) {
4139 int method_count = klassItable::method_count_for_interface(this);
4140 assert(i >= 0 && i < method_count, "index out of bounds");
4141 return true;
4142 }
4143
4144 #endif //PRODUCT
4145
4146 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
4147 st->print("a ");
4148 name()->print_value_on(st);
4149 obj->print_address_on(st);
4150 if (this == vmClasses::String_klass()
4151 && java_lang_String::value(obj) != nullptr) {
4152 ResourceMark rm;
4153 int len = java_lang_String::length(obj);
4154 int plen = (len < 24 ? len : 12);
4155 char* str = java_lang_String::as_utf8_string(obj, 0, plen);
4156 st->print(" = \"%s\"", str);
4157 if (len > plen)
4158 st->print("...[%d]", len);
4159 } else if (this == vmClasses::Class_klass()) {
4160 Klass* k = java_lang_Class::as_Klass(obj);
4161 st->print(" = ");
4162 if (k != nullptr) {
4163 k->print_value_on(st);
4164 } else {
4165 const char* tname = type2name(java_lang_Class::primitive_type(obj));
4166 st->print("%s", tname ? tname : "type?");
4167 }
4168 } else if (this == vmClasses::MethodType_klass()) {
4169 st->print(" = ");
4170 java_lang_invoke_MethodType::print_signature(obj, st);
4171 } else if (java_lang_boxing_object::is_instance(obj)) {
4172 st->print(" = ");
4173 java_lang_boxing_object::print(obj, st);
4174 } else if (this == vmClasses::LambdaForm_klass()) {
4175 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
4176 if (vmentry != nullptr) {
4177 st->print(" => ");
4178 vmentry->print_value_on(st);
4179 }
4180 } else if (this == vmClasses::MemberName_klass()) {
4181 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
4182 if (vmtarget != nullptr) {
4183 st->print(" = ");
4184 vmtarget->print_value_on(st);
4185 } else {
4186 oop clazz = java_lang_invoke_MemberName::clazz(obj);
4187 oop name = java_lang_invoke_MemberName::name(obj);
4188 if (clazz != nullptr) {
4189 clazz->print_value_on(st);
4190 } else {
4191 st->print("null");
4192 }
4193 st->print(".");
4194 if (name != nullptr) {
4195 name->print_value_on(st);
4196 } else {
4197 st->print("null");
4198 }
4199 }
4200 }
4201 }
4202
4203 const char* InstanceKlass::internal_name() const {
4204 return external_name();
4205 }
4206
4207 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
4208 const ModuleEntry* module_entry,
4209 const ClassFileStream* cfs) const {
4210
4211 if (ClassListWriter::is_enabled()) {
4212 ClassListWriter::write(this, cfs);
4213 }
4214
4215 print_class_load_helper(loader_data, module_entry, cfs);
4216 print_class_load_cause_logging();
4217 }
4218
4219 void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data,
4220 const ModuleEntry* module_entry,
4221 const ClassFileStream* cfs) const {
4222
4223 if (!log_is_enabled(Info, class, load)) {
4224 return;
4225 }
4226
4227 ResourceMark rm;
4228 LogMessage(class, load) msg;
4229 stringStream info_stream;
4230
4231 // Name and class hierarchy info
4232 info_stream.print("%s", external_name());
4233
4234 // Source
4235 if (cfs != nullptr) {
4236 if (cfs->source() != nullptr) {
4237 const char* module_name = (module_entry->name() == nullptr) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
4238 if (module_name != nullptr) {
4239 // When the boot loader created the stream, it didn't know the module name
4240 // yet. Let's format it now.
4241 if (cfs->from_boot_loader_modules_image()) {
4242 info_stream.print(" source: jrt:/%s", module_name);
4243 } else {
4244 info_stream.print(" source: %s", cfs->source());
4245 }
4246 } else {
4247 info_stream.print(" source: %s", cfs->source());
4248 }
4249 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
4250 Thread* current = Thread::current();
4251 Klass* caller = current->is_Java_thread() ?
4252 JavaThread::cast(current)->security_get_caller_class(1):
4253 nullptr;
4254 // caller can be null, for example, during a JVMTI VM_Init hook
4255 if (caller != nullptr) {
4256 info_stream.print(" source: instance of %s", caller->external_name());
4257 } else {
4258 // source is unknown
4259 }
4260 } else {
4261 oop class_loader = loader_data->class_loader();
4262 info_stream.print(" source: %s", class_loader->klass()->external_name());
4263 }
4264 } else {
4265 assert(this->in_aot_cache(), "must be");
4266 if (AOTMetaspace::in_aot_cache_dynamic_region((void*)this)) {
4267 info_stream.print(" source: shared objects file (top)");
4268 } else {
4269 info_stream.print(" source: shared objects file");
4270 }
4271 }
4272
4273 msg.info("%s", info_stream.as_string());
4274
4275 if (log_is_enabled(Debug, class, load)) {
4276 stringStream debug_stream;
4277
4278 // Class hierarchy info
4279 debug_stream.print(" klass: " PTR_FORMAT " super: " PTR_FORMAT,
4280 p2i(this), p2i(super()));
4281
4282 // Interfaces
4283 if (local_interfaces() != nullptr && local_interfaces()->length() > 0) {
4284 debug_stream.print(" interfaces:");
4285 int length = local_interfaces()->length();
4286 for (int i = 0; i < length; i++) {
4287 debug_stream.print(" " PTR_FORMAT,
4288 p2i(local_interfaces()->at(i)));
4289 }
4290 }
4291
4292 // Class loader
4293 debug_stream.print(" loader: [");
4294 loader_data->print_value_on(&debug_stream);
4295 debug_stream.print("]");
4296
4297 // Classfile checksum
4298 if (cfs) {
4299 debug_stream.print(" bytes: %d checksum: %08x",
4300 cfs->length(),
4301 ClassLoader::crc32(0, (const char*)cfs->buffer(),
4302 cfs->length()));
4303 }
4304
4305 msg.debug("%s", debug_stream.as_string());
4306 }
4307 }
4308
4309 void InstanceKlass::print_class_load_cause_logging() const {
4310 bool log_cause_native = log_is_enabled(Info, class, load, cause, native);
4311 if (log_cause_native || log_is_enabled(Info, class, load, cause)) {
4312 JavaThread* current = JavaThread::current();
4313 ResourceMark rm(current);
4314 const char* name = external_name();
4315
4316 if (LogClassLoadingCauseFor == nullptr ||
4317 (strcmp("*", LogClassLoadingCauseFor) != 0 &&
4318 strstr(name, LogClassLoadingCauseFor) == nullptr)) {
4319 return;
4320 }
4321
4322 // Log Java stack first
4323 {
4324 LogMessage(class, load, cause) msg;
4325 NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
4326
4327 info_stream.print_cr("Java stack when loading %s:", name);
4328 current->print_stack_on(&info_stream);
4329 }
4330
4331 // Log native stack second
4332 if (log_cause_native) {
4333 // Log to string first so that lines can be indented
4334 stringStream stack_stream;
4335 char buf[O_BUFLEN];
4336 address lastpc = nullptr;
4337 NativeStackPrinter nsp(current);
4338 nsp.print_stack(&stack_stream, buf, sizeof(buf), lastpc,
4339 true /* print_source_info */, -1 /* max stack */);
4340
4341 LogMessage(class, load, cause, native) msg;
4342 NonInterleavingLogStream info_stream{LogLevelType::Info, msg};
4343 info_stream.print_cr("Native stack when loading %s:", name);
4344
4345 // Print each native stack line to the log
4346 int size = (int) stack_stream.size();
4347 char* stack = stack_stream.as_string();
4348 char* stack_end = stack + size;
4349 char* line_start = stack;
4350 for (char* p = stack; p < stack_end; p++) {
4351 if (*p == '\n') {
4352 *p = '\0';
4353 info_stream.print_cr("\t%s", line_start);
4354 line_start = p + 1;
4355 }
4356 }
4357 if (line_start < stack_end) {
4358 info_stream.print_cr("\t%s", line_start);
4359 }
4360 }
4361 }
4362 }
4363
4364 // Verification
4365
4366 class VerifyFieldClosure: public BasicOopIterateClosure {
4367 protected:
4368 template <class T> void do_oop_work(T* p) {
4369 oop obj = RawAccess<>::oop_load(p);
4370 if (!oopDesc::is_oop_or_null(obj)) {
4371 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
4372 Universe::print_on(tty);
4373 guarantee(false, "boom");
4374 }
4375 }
4376 public:
4377 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
4378 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
4379 };
4380
4381 void InstanceKlass::verify_on(outputStream* st) {
4382 #ifndef PRODUCT
4383 // Avoid redundant verifies; this really should be in product.
4384 if (_verify_count == Universe::verify_count()) return;
4385 _verify_count = Universe::verify_count();
4386 #endif
4387
4388 // Verify Klass
4389 Klass::verify_on(st);
4390
4391 // Verify that klass is present in ClassLoaderData
4392 guarantee(class_loader_data()->contains_klass(this),
4393 "this class isn't found in class loader data");
4394
4395 // Verify vtables
4396 if (is_linked()) {
4397 // $$$ This used to be done only for m/s collections. Doing it
4398 // always seemed a valid generalization. (DLD -- 6/00)
4399 vtable().verify(st);
4400 }
4401
4402 // Verify first subklass
4403 if (subklass() != nullptr) {
4404 guarantee(subklass()->is_klass(), "should be klass");
4405 }
4406
4407 // Verify siblings
4408 Klass* super = this->super();
4409 Klass* sib = next_sibling();
4410 if (sib != nullptr) {
4411 if (sib == this) {
4412 fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
4413 }
4414
4415 guarantee(sib->is_klass(), "should be klass");
4416 guarantee(sib->super() == super, "siblings should have same superklass");
4417 }
4418
4419 // Verify local interfaces
4420 if (local_interfaces()) {
4421 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
4422 for (int j = 0; j < local_interfaces->length(); j++) {
4423 InstanceKlass* e = local_interfaces->at(j);
4424 guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
4425 }
4426 }
4427
4428 // Verify transitive interfaces
4429 if (transitive_interfaces() != nullptr) {
4430 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
4431 for (int j = 0; j < transitive_interfaces->length(); j++) {
4432 InstanceKlass* e = transitive_interfaces->at(j);
4433 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
4434 }
4435 }
4436
4437 // Verify methods
4438 if (methods() != nullptr) {
4439 Array<Method*>* methods = this->methods();
4440 for (int j = 0; j < methods->length(); j++) {
4441 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4442 }
4443 for (int j = 0; j < methods->length() - 1; j++) {
4444 Method* m1 = methods->at(j);
4445 Method* m2 = methods->at(j + 1);
4446 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4447 }
4448 }
4449
4450 // Verify method ordering
4451 if (method_ordering() != nullptr) {
4452 Array<int>* method_ordering = this->method_ordering();
4453 int length = method_ordering->length();
4454 if (JvmtiExport::can_maintain_original_method_order() ||
4455 ((CDSConfig::is_using_archive() || CDSConfig::is_dumping_archive()) && length != 0)) {
4456 guarantee(length == methods()->length(), "invalid method ordering length");
4457 jlong sum = 0;
4458 for (int j = 0; j < length; j++) {
4459 int original_index = method_ordering->at(j);
4460 guarantee(original_index >= 0, "invalid method ordering index");
4461 guarantee(original_index < length, "invalid method ordering index");
4462 sum += original_index;
4463 }
4464 // Verify sum of indices 0,1,...,length-1
4465 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
4466 } else {
4467 guarantee(length == 0, "invalid method ordering length");
4468 }
4469 }
4470
4471 // Verify default methods
4472 if (default_methods() != nullptr) {
4473 Array<Method*>* methods = this->default_methods();
4474 for (int j = 0; j < methods->length(); j++) {
4475 guarantee(methods->at(j)->is_method(), "non-method in methods array");
4476 }
4477 for (int j = 0; j < methods->length() - 1; j++) {
4478 Method* m1 = methods->at(j);
4479 Method* m2 = methods->at(j + 1);
4480 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
4481 }
4482 }
4483
4484 // Verify JNI static field identifiers
4485 if (jni_ids() != nullptr) {
4486 jni_ids()->verify(this);
4487 }
4488
4489 // Verify other fields
4490 if (constants() != nullptr) {
4491 guarantee(constants()->is_constantPool(), "should be constant pool");
4492 }
4493 }
4494
4495 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
4496 Klass::oop_verify_on(obj, st);
4497 VerifyFieldClosure blk;
4498 obj->oop_iterate(&blk);
4499 }
4500
4501 // JNIid class for jfieldIDs only
4502 // Note to reviewers:
4503 // These JNI functions are just moved over to column 1 and not changed
4504 // in the compressed oops workspace.
4505 JNIid::JNIid(InstanceKlass* holder, int offset, JNIid* next) {
4506 _holder = holder;
4507 _offset = offset;
4508 _next = next;
4509 DEBUG_ONLY(_is_static_field_id = false;)
4510 }
4511
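// Walk the list starting at this id and return the entry whose field offset
// matches 'offset', or null if there is none.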
4512 JNIid* JNIid::find(int offset) {
4513 JNIid* current = this;
4514 while (current != nullptr) {
4515 if (current->offset() == offset) return current;
4516 current = current->next();
4517 }
4518 return nullptr;
4519 }
4520
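// Delete every JNIid in the list starting at 'current'.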
4521 void JNIid::deallocate(JNIid* current) {
4522 while (current != nullptr) {
4523 JNIid* next = current->next();
4524 delete current;
4525 current = next;
4526 }
4527 }
4528
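// Check that every id in the list belongs to 'holder' and, in debug builds, that
// each static field id falls within the holder's block of static fields in the
// java mirror.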
4529 void JNIid::verify(InstanceKlass* holder) {
  int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
  int end_field_offset = first_field_offset + (holder->static_field_size() * wordSize);
4533
4534 JNIid* current = this;
4535 while (current != nullptr) {
4536 guarantee(current->holder() == holder, "Invalid klass in JNIid");
4537 #ifdef ASSERT
4538 int o = current->offset();
4539 if (current->is_static_field_id()) {
4540 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
4541 }
4542 #endif
4543 current = current->next();
4544 }
4545 }
4546
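// Set the class initialization state. Apart from the reset to 'allocated', the
// transition must be monotonically increasing (classes in the AOT cache may also
// re-store their current state). The release store publishes the new state, and
// any writes that preceded it, to readers of _init_state.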
4547 void InstanceKlass::set_init_state(ClassState state) {
4548 #ifdef ASSERT
4549 bool good_state = in_aot_cache() ? (_init_state <= state)
4550 : (_init_state < state);
4551 assert(good_state || state == allocated, "illegal state transition");
4552 #endif
4553 assert(_init_thread == nullptr, "should be cleared before state change");
4554 AtomicAccess::release_store(&_init_state, state);
4555 }
4556
4557 #if INCLUDE_JVMTI
4558
4559 // RedefineClasses() support for previous versions
4560
// Set when, globally, there is at least one previous version of a class that must
// be walked during class unloading; such versions are kept because old methods in
// the class may still be running. Otherwise the previous version lists are cleaned up.
4564 bool InstanceKlass::_should_clean_previous_versions = false;
4565
// Returns true if there are previous versions of a class to process during class
// unloading, and resets the flag to false. purge_previous_version_list() sets the
// flag back to true if any previous versions remain, i.e., if there is work to do
// for next time. This avoids the expensive code cache walk in
// CLDG::clean_deallocate_lists().
4571 bool InstanceKlass::should_clean_previous_versions_and_reset() {
4572 bool ret = _should_clean_previous_versions;
4573 log_trace(redefine, class, iklass, purge)("Class unloading: should_clean_previous_versions = %s",
4574 ret ? "true" : "false");
4575 _should_clean_previous_versions = false;
4576 return ret;
4577 }
4578
// Nulls out the jmethodID for all obsolete methods in the previous version of 'klass'.
// These obsolete methods exist only in the previous version, and we are about to delete
// their memory. The jmethodIDs for these methods are deallocated when the class is
// unloaded, so this does not remove them from the table.
4582 void InstanceKlass::clear_obsolete_jmethod_ids(InstanceKlass* klass) {
4583 Array<Method*>* method_refs = klass->methods();
4584 for (int k = 0; k < method_refs->length(); k++) {
4585 Method* method = method_refs->at(k);
4586 // Only need to clear obsolete methods.
4587 if (method != nullptr && method->is_obsolete()) {
4588 method->clear_jmethod_id();
4589 }
4590 }
4591 }
4592
4593 // Purge previous versions before adding new previous versions of the class and
4594 // during class unloading.
4595 void InstanceKlass::purge_previous_version_list() {
4596 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
4597 assert(has_been_redefined(), "Should only be called for main class");
4598
4599 // Quick exit.
4600 if (previous_versions() == nullptr) {
4601 return;
4602 }
4603
  // This klass has previous versions, so see what we can clean up
  // while it is safe to do so.
4606
4607 int deleted_count = 0; // leave debugging breadcrumbs
4608 int live_count = 0;
4609 ClassLoaderData* loader_data = class_loader_data();
4610 assert(loader_data != nullptr, "should never be null");
4611
4612 ResourceMark rm;
4613 log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());
4614
4615 // previous versions are linked together through the InstanceKlass
4616 InstanceKlass* pv_node = previous_versions();
4617 InstanceKlass* last = this;
4618 int version = 0;
4619
4620 // check the previous versions list
  while (pv_node != nullptr) {
4622
4623 ConstantPool* pvcp = pv_node->constants();
4624 assert(pvcp != nullptr, "cp ref was unexpectedly cleared");
4625
4626 if (!pvcp->on_stack()) {
4627 // If the constant pool isn't on stack, none of the methods
4628 // are executing. Unlink this previous_version.
4629 // The previous version InstanceKlass is on the ClassLoaderData deallocate list
4630 // so will be deallocated during the next phase of class unloading.
4631 log_trace(redefine, class, iklass, purge)
4632 ("previous version " PTR_FORMAT " is dead.", p2i(pv_node));
4633 // Unlink from previous version list.
4634 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
4635 InstanceKlass* next = pv_node->previous_versions();
4636 clear_obsolete_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class
4637 pv_node->link_previous_versions(nullptr); // point next to null
4638 last->link_previous_versions(next);
4639 // Delete this node directly. Nothing is referring to it and we don't
4640 // want it to increase the counter for metadata to delete in CLDG.
4641 MetadataFactory::free_metadata(loader_data, pv_node);
4642 pv_node = next;
4643 deleted_count++;
4644 version++;
4645 continue;
4646 } else {
4647 assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder");
      guarantee(!loader_data->is_unloading(), "unloaded classes can't be on the stack");
4649 live_count++;
4650 if (pvcp->in_aot_cache()) {
4651 // Shared previous versions can never be removed so no cleaning is needed.
4652 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node));
4653 } else {
4654 // Previous version alive, set that clean is needed for next time.
4655 _should_clean_previous_versions = true;
4656 log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is alive", p2i(pv_node));
4657 }
4658 }
4659
4660 // next previous version
4661 last = pv_node;
4662 pv_node = pv_node->previous_versions();
4663 version++;
4664 }
4665 log_trace(redefine, class, iklass, purge)
4666 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
4667 }
4668
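// Called during RedefineClasses when the old methods are a mix of EMCP and obsolete
// methods: walk the previous version list and mark as obsolete any EMCP method whose
// name and signature match a method that has just been made obsolete.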
4669 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
4670 int emcp_method_count) {
4671 int obsolete_method_count = old_methods->length() - emcp_method_count;
4672
4673 if (emcp_method_count != 0 && obsolete_method_count != 0 &&
4674 _previous_versions != nullptr) {
4675 // We have a mix of obsolete and EMCP methods so we have to
4676 // clear out any matching EMCP method entries the hard way.
4677 int local_count = 0;
4678 for (int i = 0; i < old_methods->length(); i++) {
4679 Method* old_method = old_methods->at(i);
4680 if (old_method->is_obsolete()) {
4681 // only obsolete methods are interesting
4682 Symbol* m_name = old_method->name();
4683 Symbol* m_signature = old_method->signature();
4684
4685 // previous versions are linked together through the InstanceKlass
4686 int j = 0;
4687 for (InstanceKlass* prev_version = _previous_versions;
4688 prev_version != nullptr;
4689 prev_version = prev_version->previous_versions(), j++) {
4690
4691 Array<Method*>* method_refs = prev_version->methods();
4692 for (int k = 0; k < method_refs->length(); k++) {
4693 Method* method = method_refs->at(k);
4694
4695 if (!method->is_obsolete() &&
4696 method->name() == m_name &&
4697 method->signature() == m_signature) {
4698 // The current RedefineClasses() call has made all EMCP
4699 // versions of this method obsolete so mark it as obsolete
4700 log_trace(redefine, class, iklass, add)
4701 ("%s(%s): flush obsolete method @%d in version @%d",
4702 m_name->as_C_string(), m_signature->as_C_string(), k, j);
4703
4704 method->set_is_obsolete();
4705 break;
4706 }
4707 }
4708
4709 // The previous loop may not find a matching EMCP method, but
4710 // that doesn't mean that we can optimize and not go any
4711 // further back in the PreviousVersion generations. The EMCP
4712 // method for this generation could have already been made obsolete,
4713 // but there still may be an older EMCP method that has not
4714 // been made obsolete.
4715 }
4716
4717 if (++local_count >= obsolete_method_count) {
4718 // no more obsolete methods so bail out now
4719 break;
4720 }
4721 }
4722 }
4723 }
4724 }
4725
4726 // Save the scratch_class as the previous version if any of the methods are running.
4727 // The previous_versions are used to set breakpoints in EMCP methods and they are
4728 // also used to clean MethodData links to redefined methods that are no longer running.
4729 void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
4730 int emcp_method_count) {
4731 assert(Thread::current()->is_VM_thread(),
4732 "only VMThread can add previous versions");
4733
4734 ResourceMark rm;
4735 log_trace(redefine, class, iklass, add)
4736 ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);
4737
4738 // Clean out old previous versions for this class
4739 purge_previous_version_list();
4740
4741 // Mark newly obsolete methods in remaining previous versions. An EMCP method from
4742 // a previous redefinition may be made obsolete by this redefinition.
4743 Array<Method*>* old_methods = scratch_class->methods();
4744 mark_newly_obsolete_methods(old_methods, emcp_method_count);
4745
4746 // If the constant pool for this previous version of the class
4747 // is not marked as being on the stack, then none of the methods
4748 // in this previous version of the class are on the stack so
4749 // we don't need to add this as a previous version.
4750 ConstantPool* cp_ref = scratch_class->constants();
4751 if (!cp_ref->on_stack()) {
4752 log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
4753 scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
4754 return;
4755 }
4756
4757 // Add previous version if any methods are still running or if this is
4758 // a shared class which should never be removed.
4759 assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version");
4760 scratch_class->link_previous_versions(previous_versions());
4761 link_previous_versions(scratch_class);
4762 if (cp_ref->in_aot_cache()) {
    log_trace(redefine, class, iklass, add)("scratch class added; class is shared");
4764 } else {
4765 // We only set clean_previous_versions flag for processing during class
4766 // unloading for non-shared classes.
4767 _should_clean_previous_versions = true;
    log_trace(redefine, class, iklass, add)("scratch class added; one of its methods is on_stack.");
4769 }
4770 } // end add_previous_version()
4771
4772 #endif // INCLUDE_JVMTI
4773
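// Find the method whose method_idnum() equals 'idnum'. The idnum normally matches
// the index into the methods array, so that slot is checked first; otherwise fall
// back to a linear search and return null if no method matches.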
4774 Method* InstanceKlass::method_with_idnum(int idnum) const {
4775 Method* m = nullptr;
4776 if (idnum < methods()->length()) {
4777 m = methods()->at(idnum);
4778 }
4779 if (m == nullptr || m->method_idnum() != idnum) {
4780 for (int index = 0; index < methods()->length(); ++index) {
4781 m = methods()->at(index);
4782 if (m->method_idnum() == idnum) {
4783 return m;
4784 }
4785 }
    // None found; return null for the caller to handle.
4787 return nullptr;
4788 }
4789 return m;
4790 }
4791
4792
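// As method_with_idnum(), but matches on the original method idnum
// (orig_method_idnum()).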
4793 Method* InstanceKlass::method_with_orig_idnum(int idnum) const {
4794 if (idnum >= methods()->length()) {
4795 return nullptr;
4796 }
4797 Method* m = methods()->at(idnum);
4798 if (m != nullptr && m->orig_method_idnum() == idnum) {
4799 return m;
4800 }
4801 // Obsolete method idnum does not match the original idnum
4802 for (int index = 0; index < methods()->length(); ++index) {
4803 m = methods()->at(index);
4804 if (m->orig_method_idnum() == idnum) {
4805 return m;
4806 }
4807 }
  // None found; return null for the caller to handle.
4809 return nullptr;
4810 }
4811
4812
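// Look up a method by its original idnum in a particular version of this class
// (the current class or one of the previous versions kept for RedefineClasses).
// Returns null if that version of the klass is no longer available.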
4813 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) const {
4814 const InstanceKlass* holder = get_klass_version(version);
4815 if (holder == nullptr) {
4816 return nullptr; // The version of klass is gone, no method is found
4817 }
4818 return holder->method_with_orig_idnum(idnum);
4819 }
4820
4821 #if INCLUDE_JVMTI
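// Accessors for the original class file data cached for JVMTI; the layout of the
// cached data is managed by VM_RedefineClasses.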
4822 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
4823 return _cached_class_file;
4824 }
4825
4826 jint InstanceKlass::get_cached_class_file_len() {
4827 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
4828 }
4829
4830 unsigned char * InstanceKlass::get_cached_class_file_bytes() {
4831 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
4832 }
4833 #endif
4834
4835 // Make a step iterating over the class hierarchy under the root class.
4836 // Skips subclasses if requested.
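// The walk is a pre-order, depth-first traversal of the subclass tree rooted at
// _root: descend into the first subclass when subclass visiting is enabled,
// otherwise advance to the next sibling, backtracking through java_super() until
// a sibling is found or the root is reached again.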
4837 void ClassHierarchyIterator::next() {
4838 assert(_current != nullptr, "required");
4839 if (_visit_subclasses && _current->subklass() != nullptr) {
4840 _current = _current->subklass();
4841 return; // visit next subclass
4842 }
4843 _visit_subclasses = true; // reset
4844 while (_current->next_sibling() == nullptr && _current != _root) {
4845 _current = _current->java_super(); // backtrack; no more sibling subclasses left
4846 }
4847 if (_current == _root) {
4848 // Iteration is over (back at root after backtracking). Invalidate the iterator.
4849 _current = nullptr;
4850 return;
4851 }
4852 _current = _current->next_sibling();
4853 return; // visit next sibling subclass
4854 }
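
// A minimal usage sketch (assuming the done()/klass()/skip_subclasses() accessors
// declared for ClassHierarchyIterator in instanceKlass.hpp):
//
//   for (ClassHierarchyIterator iter(root_ik); !iter.done(); iter.next()) {
//     Klass* k = iter.klass();
//     // ... examine k; call iter.skip_subclasses() to prune k's subtree ...
//   }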