1 /*
2 * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "jvm.h"
27 #include "cds/archiveUtils.hpp"
28 #include "cds/classListWriter.hpp"
29 #include "cds/metaspaceShared.hpp"
30 #include "classfile/classFileParser.hpp"
31 #include "classfile/classFileStream.hpp"
32 #include "classfile/classLoader.hpp"
33 #include "classfile/classLoaderData.inline.hpp"
34 #include "classfile/javaClasses.hpp"
35 #include "classfile/moduleEntry.hpp"
36 #include "classfile/resolutionErrors.hpp"
37 #include "classfile/symbolTable.hpp"
38 #include "classfile/systemDictionary.hpp"
39 #include "classfile/systemDictionaryShared.hpp"
40 #include "classfile/verifier.hpp"
41 #include "classfile/vmClasses.hpp"
42 #include "classfile/vmSymbols.hpp"
43 #include "code/codeCache.hpp"
44 #include "code/dependencyContext.hpp"
45 #include "compiler/compilationPolicy.hpp"
46 #include "compiler/compileBroker.hpp"
47 #include "gc/shared/collectedHeap.inline.hpp"
48 #include "interpreter/oopMapCache.hpp"
49 #include "interpreter/rewriter.hpp"
50 #include "jvmtifiles/jvmti.h"
51 #include "logging/log.hpp"
52 #include "logging/logMessage.hpp"
53 #include "logging/logStream.hpp"
54 #include "memory/allocation.inline.hpp"
55 #include "memory/iterator.inline.hpp"
56 #include "memory/metadataFactory.hpp"
57 #include "memory/metaspaceClosure.hpp"
58 #include "memory/oopFactory.hpp"
59 #include "memory/resourceArea.hpp"
60 #include "memory/universe.hpp"
61 #include "oops/fieldStreams.inline.hpp"
62 #include "oops/constantPool.hpp"
63 #include "oops/instanceClassLoaderKlass.hpp"
64 #include "oops/instanceKlass.inline.hpp"
65 #include "oops/instanceMirrorKlass.hpp"
66 #include "oops/instanceOop.hpp"
67 #include "oops/klass.inline.hpp"
68 #include "oops/method.hpp"
69 #include "oops/oop.inline.hpp"
70 #include "oops/recordComponent.hpp"
71 #include "oops/symbol.hpp"
72 #include "prims/jvmtiExport.hpp"
73 #include "prims/jvmtiRedefineClasses.hpp"
74 #include "prims/jvmtiThreadState.hpp"
75 #include "prims/methodComparator.hpp"
76 #include "runtime/arguments.hpp"
77 #include "runtime/atomic.hpp"
78 #include "runtime/biasedLocking.hpp"
79 #include "runtime/fieldDescriptor.inline.hpp"
80 #include "runtime/handles.inline.hpp"
81 #include "runtime/javaCalls.hpp"
82 #include "runtime/mutexLocker.hpp"
83 #include "runtime/orderAccess.hpp"
84 #include "runtime/reflectionUtils.hpp"
85 #include "runtime/thread.inline.hpp"
86 #include "services/classLoadingService.hpp"
87 #include "services/threadService.hpp"
88 #include "utilities/dtrace.hpp"
89 #include "utilities/events.hpp"
90 #include "utilities/macros.hpp"
91 #include "utilities/stringUtils.hpp"
92 #ifdef COMPILER1
93 #include "c1/c1_Compiler.hpp"
94 #endif
95 #if INCLUDE_JFR
96 #include "jfr/jfrEvents.hpp"
97 #endif
98
99
100 #ifdef DTRACE_ENABLED
101
102
103 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
104 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
105 #define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
106 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
107 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
108 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
109 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
110 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
111 #define DTRACE_CLASSINIT_PROBE(type, thread_type) \
112 { \
113 char* data = NULL; \
114 int len = 0; \
115 Symbol* clss_name = name(); \
116 if (clss_name != NULL) { \
117 data = (char*)clss_name->bytes(); \
118 len = clss_name->utf8_length(); \
119 } \
120 HOTSPOT_CLASS_INITIALIZATION_##type( \
121 data, len, (void*)class_loader(), thread_type); \
122 }
123
124 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait) \
125 { \
126 char* data = NULL; \
127 int len = 0; \
128 Symbol* clss_name = name(); \
129 if (clss_name != NULL) { \
130 data = (char*)clss_name->bytes(); \
131 len = clss_name->utf8_length(); \
132 } \
133 HOTSPOT_CLASS_INITIALIZATION_##type( \
134 data, len, (void*)class_loader(), thread_type, wait); \
135 }
136
137 #else // ndef DTRACE_ENABLED
138
139 #define DTRACE_CLASSINIT_PROBE(type, thread_type)
140 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
141
142 #endif // ndef DTRACE_ENABLED
143
144
145 static inline bool is_class_loader(const Symbol* class_name,
146 const ClassFileParser& parser) {
147 assert(class_name != NULL, "invariant");
148
149 if (class_name == vmSymbols::java_lang_ClassLoader()) {
150 return true;
151 }
152
153 if (vmClasses::ClassLoader_klass_loaded()) {
154 const Klass* const super_klass = parser.super_klass();
155 if (super_klass != NULL) {
156 if (super_klass->is_subtype_of(vmClasses::ClassLoader_klass())) {
157 return true;
158 }
159 }
160 }
161 return false;
162 }
163
164 // private: called to verify that k is a static member of this nest.
165 // We know that k is an instance class in the same package and hence the
166 // same classloader.
167 bool InstanceKlass::has_nest_member(JavaThread* current, InstanceKlass* k) const {
168 assert(!is_hidden(), "unexpected hidden class");
169 if (_nest_members == NULL || _nest_members == Universe::the_empty_short_array()) {
170 if (log_is_enabled(Trace, class, nestmates)) {
171 ResourceMark rm(current);
172 log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
173 k->external_name(), this->external_name());
174 }
175 return false;
176 }
177
178 if (log_is_enabled(Trace, class, nestmates)) {
179 ResourceMark rm(current);
180 log_trace(class, nestmates)("Checking nest membership of %s in %s",
181 k->external_name(), this->external_name());
182 }
183
184 // Check for the named class in _nest_members.
185 // We don't resolve, or load, any classes.
186 for (int i = 0; i < _nest_members->length(); i++) {
187 int cp_index = _nest_members->at(i);
188 Symbol* name = _constants->klass_name_at(cp_index);
189 if (name == k->name()) {
190 log_trace(class, nestmates)("- named class found at nest_members[%d] => cp[%d]", i, cp_index);
191 return true;
192 }
193 }
194 log_trace(class, nestmates)("- class is NOT a nest member!");
195 return false;
196 }
197
198 // Called to verify that k is a permitted subclass of this class
199 bool InstanceKlass::has_as_permitted_subclass(const InstanceKlass* k) const {
200 Thread* current = Thread::current();
201 assert(k != NULL, "sanity check");
202 assert(_permitted_subclasses != NULL && _permitted_subclasses != Universe::the_empty_short_array(),
203 "unexpected empty _permitted_subclasses array");
204
205 if (log_is_enabled(Trace, class, sealed)) {
206 ResourceMark rm(current);
207 log_trace(class, sealed)("Checking for permitted subclass of %s in %s",
208 k->external_name(), this->external_name());
209 }
210
211 // Check that the class and its super are in the same module.
212 if (k->module() != this->module()) {
213 ResourceMark rm(current);
214 log_trace(class, sealed)("Check failed for same module of permitted subclass %s and sealed class %s",
215 k->external_name(), this->external_name());
216 return false;
217 }
218
219 if (!k->is_public() && !is_same_class_package(k)) {
220 ResourceMark rm(current);
221 log_trace(class, sealed)("Check failed, subclass %s not public and not in the same package as sealed class %s",
222 k->external_name(), this->external_name());
223 return false;
224 }
225
226 for (int i = 0; i < _permitted_subclasses->length(); i++) {
227 int cp_index = _permitted_subclasses->at(i);
228 Symbol* name = _constants->klass_name_at(cp_index);
229 if (name == k->name()) {
230 log_trace(class, sealed)("- Found it at permitted_subclasses[%d] => cp[%d]", i, cp_index);
231 return true;
232 }
233 }
234 log_trace(class, sealed)("- class is NOT a permitted subclass!");
235 return false;
236 }
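// For illustration, a hedged sketch (Java source, not part of this file; hypothetical names)
// of the sealed-class relationship the checks above enforce:
//
//   public sealed class Shape permits Circle, Square { }  // PermittedSubclasses: Circle, Square
//   public final class Circle extends Shape { }           // accepted: listed, same module
//   final class Rhombus extends Shape { }                 // rejected: not in the permits list
//
// A permitted subclass that is not public must additionally be in the same package as the
// sealed class, which is what the is_same_class_package() test above verifies.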
237
238 // Return nest-host class, resolving, validating and saving it if needed.
239 // In cases where this is called from a thread that cannot do classloading
240 // (such as a native JIT thread) then we simply return NULL, which in turn
241 // causes the access check to return false. Such code will retry the access
242 // from a more suitable environment later. Otherwise the _nest_host is always
243 // set once this method returns.
244 // Any errors from nest-host resolution must be preserved so they can be queried
245 // from higher-level access checking code, and reported as part of access checking
246 // exceptions.
247 // VirtualMachineErrors are propagated with a NULL return.
248 // Under any conditions where the _nest_host can be set to non-NULL the resulting
249 // value of it and, if applicable, the nest host resolution/validation error,
250 // are idempotent.
251 InstanceKlass* InstanceKlass::nest_host(TRAPS) {
252 InstanceKlass* nest_host_k = _nest_host;
253 if (nest_host_k != NULL) {
254 return nest_host_k;
255 }
256
257 ResourceMark rm(THREAD);
258
259 // need to resolve and save our nest-host class.
260 if (_nest_host_index != 0) { // we have a real nest_host
261 // Before trying to resolve check if we're in a suitable context
262 bool can_resolve = THREAD->can_call_java();
263 if (!can_resolve && !_constants->tag_at(_nest_host_index).is_klass()) {
264 log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
265 this->external_name());
266 return NULL; // sentinel to say "try again from a different context"
267 }
268
269 log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
270 this->external_name(),
271 _constants->klass_name_at(_nest_host_index)->as_C_string());
272
273 Klass* k = _constants->klass_at(_nest_host_index, THREAD);
274 if (HAS_PENDING_EXCEPTION) {
275 if (PENDING_EXCEPTION->is_a(vmClasses::VirtualMachineError_klass())) {
276 return NULL; // propagate VMEs
277 }
278 stringStream ss;
279 char* target_host_class = _constants->klass_name_at(_nest_host_index)->as_C_string();
280 ss.print("Nest host resolution of %s with host %s failed: ",
281 this->external_name(), target_host_class);
282 java_lang_Throwable::print(PENDING_EXCEPTION, &ss);
283 const char* msg = ss.as_string(true /* on C-heap */);
284 constantPoolHandle cph(THREAD, constants());
285 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
286 CLEAR_PENDING_EXCEPTION;
287
288 log_trace(class, nestmates)("%s", msg);
289 } else {
290 // A valid nest-host is an instance class in the current package that lists this
291 // class as a nest member. If any of these conditions are not met the class is
292 // its own nest-host.
293 const char* error = NULL;
294
295 // JVMS 5.4.4 indicates package check comes first
296 if (is_same_class_package(k)) {
297 // Now check actual membership. We can't be a member if our "host" is
298 // not an instance class.
299 if (k->is_instance_klass()) {
300 nest_host_k = InstanceKlass::cast(k);
301 bool is_member = nest_host_k->has_nest_member(THREAD, this);
302 if (is_member) {
303 _nest_host = nest_host_k; // save resolved nest-host value
304
305 log_trace(class, nestmates)("Resolved nest-host of %s to %s",
306 this->external_name(), k->external_name());
307 return nest_host_k;
308 } else {
309 error = "current type is not listed as a nest member";
310 }
311 } else {
312 error = "host is not an instance class";
313 }
314 } else {
315 error = "types are in different packages";
316 }
317
318 // something went wrong, so record what and log it
319 {
320 stringStream ss;
321 ss.print("Type %s (loader: %s) is not a nest member of type %s (loader: %s): %s",
322 this->external_name(),
323 this->class_loader_data()->loader_name_and_id(),
324 k->external_name(),
325 k->class_loader_data()->loader_name_and_id(),
326 error);
327 const char* msg = ss.as_string(true /* on C-heap */);
328 constantPoolHandle cph(THREAD, constants());
329 SystemDictionary::add_nest_host_error(cph, _nest_host_index, msg);
330 log_trace(class, nestmates)("%s", msg);
331 }
332 }
333 } else {
334 log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
335 this->external_name());
336 }
337
338 // Either not in an explicit nest, or else an error occurred, so
339 // the nest-host is set to `this`. Any thread that sees this assignment
340 // will also see any setting of nest_host_error(), if applicable.
341 return (_nest_host = this);
342 }
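// For illustration, a hedged sketch (Java source, not part of this file; hypothetical names)
// of the static nest that the resolution above validates:
//
//   class Outer {                // nest host: its NestMembers attribute lists Outer$Inner
//     private int secret;
//     class Inner {              // its NestHost attribute names Outer
//       int peek(Outer o) { return o.secret; }  // private access allowed between nestmates
//     }
//   }
//
// If Outer$Inner were paired with an Outer whose NestMembers no longer lists it, the
// membership check above fails, the error is recorded via add_nest_host_error(), and the
// class becomes its own nest-host.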
343
344 // Dynamic nest member support: set this class's nest host to the given class.
345 // This occurs as part of the class definition, as soon as the instanceKlass
346 // has been created and doesn't require further resolution. The code:
347 // lookup().defineHiddenClass(bytes_for_X, NESTMATE);
348 // results in:
349 // class_of_X.set_nest_host(lookup().lookupClass().getNestHost())
350 // If it has an explicit _nest_host_index or _nest_members, these will be ignored.
351 // We also know the "host" is a valid nest-host in the same package so we can
352 // assert some of those facts.
353 void InstanceKlass::set_nest_host(InstanceKlass* host) {
354 assert(is_hidden(), "must be a hidden class");
355 assert(host != NULL, "NULL nest host specified");
356 assert(_nest_host == NULL, "current class has resolved nest-host");
357 assert(nest_host_error() == NULL, "unexpected nest host resolution error exists: %s",
358 nest_host_error());
359 assert((host->_nest_host == NULL && host->_nest_host_index == 0) ||
360 (host->_nest_host == host), "proposed host is not a valid nest-host");
361 // Can't assert this as package is not set yet:
362 // assert(is_same_class_package(host), "proposed host is in wrong package");
363
364 if (log_is_enabled(Trace, class, nestmates)) {
365 ResourceMark rm;
366 const char* msg = "";
367 // a hidden class does not expect a statically defined nest-host
368 if (_nest_host_index > 0) {
369 msg = "(the NestHost attribute in the current class is ignored)";
370 } else if (_nest_members != NULL && _nest_members != Universe::the_empty_short_array()) {
371 msg = "(the NestMembers attribute in the current class is ignored)";
372 }
373 log_trace(class, nestmates)("Injected type %s into the nest of %s %s",
374 this->external_name(),
375 host->external_name(),
376 msg);
377 }
378 // set dynamic nest host
379 _nest_host = host;
380 // Record dependency to keep nest host from being unloaded before this class.
381 ClassLoaderData* this_key = class_loader_data();
382 this_key->record_dependency(host);
383 }
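// A minimal sketch (Java source, not part of this file) of how a dynamic nestmate is usually
// created via the public java.lang.invoke API; variable names are hypothetical:
//
//   MethodHandles.Lookup lookup = MethodHandles.lookup();
//   Class<?> hidden = lookup.defineHiddenClass(bytes, /* initialize */ false,
//                                              MethodHandles.Lookup.ClassOption.NESTMATE)
//                           .lookupClass();
//   assert hidden.getNestHost() == lookup.lookupClass().getNestHost();
//
// The resulting hidden class reaches set_nest_host() with its nest-host injected rather than
// resolved from a NestHost attribute.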
384
385 // check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
386 // or we are k's nest_host - all of which is covered by comparing the two
387 // resolved_nest_hosts.
388 // Any exceptions (i.e. VMEs) are propagated.
389 bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {
390
391 assert(this != k, "this should be handled by higher-level code");
392
393 // Per JVMS 5.4.4 we first resolve and validate the current class, then
394 // the target class k.
395
396 InstanceKlass* cur_host = nest_host(CHECK_false);
397 if (cur_host == NULL) {
398 return false;
399 }
400
401 Klass* k_nest_host = k->nest_host(CHECK_false);
402 if (k_nest_host == NULL) {
403 return false;
404 }
405
406 bool access = (cur_host == k_nest_host);
407
408 ResourceMark rm(THREAD);
409 log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
410 this->external_name(),
411 access ? "" : "NOT ",
412 k->external_name());
413 return access;
414 }
415
416 const char* InstanceKlass::nest_host_error() {
417 if (_nest_host_index == 0) {
418 return NULL;
419 } else {
420 constantPoolHandle cph(Thread::current(), constants());
421 return SystemDictionary::find_nest_host_error(cph, (int)_nest_host_index);
422 }
423 }
424
425 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
426 const int size = InstanceKlass::size(parser.vtable_size(),
427 parser.itable_size(),
428 nonstatic_oop_map_size(parser.total_oop_map_count()),
429 parser.is_interface());
430
431 const Symbol* const class_name = parser.class_name();
432 assert(class_name != NULL, "invariant");
433 ClassLoaderData* loader_data = parser.loader_data();
434 assert(loader_data != NULL, "invariant");
435
436 InstanceKlass* ik;
437
438 // Allocation
439 if (REF_NONE == parser.reference_type()) {
440 if (class_name == vmSymbols::java_lang_Class()) {
441 // mirror
442 ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
443 }
444 else if (is_class_loader(class_name, parser)) {
445 // class loader
446 ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
447 } else {
448 // normal
449 ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_kind_other);
450 }
451 } else {
452 // reference
453 ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
454 }
455
456 // Check for pending exception before adding to the loader data and incrementing
457 // class count. Can get OOM here.
458 if (HAS_PENDING_EXCEPTION) {
459 return NULL;
460 }
461
462 return ik;
463 }
464
465
466 // copy method ordering from resource area to Metaspace
467 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
468 if (m != NULL) {
469 // allocate a new array and copy contents (memcpy?)
470 _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
471 for (int i = 0; i < m->length(); i++) {
472 _method_ordering->at_put(i, m->at(i));
473 }
474 } else {
475 _method_ordering = Universe::the_empty_int_array();
476 }
477 }
478
479 // create a new array of vtable_indices for default methods
480 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
481 Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
482 assert(default_vtable_indices() == NULL, "only create once");
483 set_default_vtable_indices(vtable_indices);
484 return vtable_indices;
485 }
486
487 InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind, KlassID id) :
488 Klass(id),
489 _nest_members(NULL),
490 _nest_host(NULL),
491 _permitted_subclasses(NULL),
492 _record_components(NULL),
493 _static_field_size(parser.static_field_size()),
494 _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
495 _itable_len(parser.itable_size()),
496 _nest_host_index(0),
497 _init_state(allocated),
498 _reference_type(parser.reference_type()),
499 _init_thread(NULL)
500 {
501 set_vtable_length(parser.vtable_size());
502 set_kind(kind);
503 set_access_flags(parser.access_flags());
504 if (parser.is_hidden()) set_is_hidden();
505 set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
506 false));
507
508 assert(NULL == _methods, "underlying memory not zeroed?");
509 assert(is_instance_klass(), "is layout incorrect?");
510 assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
511
512 // Set biased locking bit for all instances of this class; it will be
513 // cleared if revocation occurs too often for this type
514 if (UseBiasedLocking && BiasedLocking::enabled()) {
515 set_prototype_header(markWord::biased_locking_prototype());
516 }
517 }
518
519 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
520 Array<Method*>* methods) {
521 if (methods != NULL && methods != Universe::the_empty_method_array() &&
522 !methods->is_shared()) {
523 for (int i = 0; i < methods->length(); i++) {
524 Method* method = methods->at(i);
525 if (method == NULL) continue; // maybe null if error processing
526 // Only want to delete methods that are not executing for RedefineClasses.
527 // The previous version will point to them so they're not totally dangling
528 assert (!method->on_stack(), "shouldn't be called with methods on stack");
529 MetadataFactory::free_metadata(loader_data, method);
530 }
531 MetadataFactory::free_array<Method*>(loader_data, methods);
532 }
533 }
534
535 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
536 const Klass* super_klass,
537 Array<InstanceKlass*>* local_interfaces,
538 Array<InstanceKlass*>* transitive_interfaces) {
539 // Only deallocate transitive interfaces if they are not empty, not the same as the
540 // super class's, and not the same as the local interfaces. See code in parseClassFile.
541 Array<InstanceKlass*>* ti = transitive_interfaces;
542 if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
543 // check that the interfaces don't come from super class
544 Array<InstanceKlass*>* sti = (super_klass == NULL) ? NULL :
545 InstanceKlass::cast(super_klass)->transitive_interfaces();
546 if (ti != sti && ti != NULL && !ti->is_shared()) {
547 MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
548 }
549 }
550
551 // local interfaces can be empty
552 if (local_interfaces != Universe::the_empty_instance_klass_array() &&
553 local_interfaces != NULL && !local_interfaces->is_shared()) {
554 MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
555 }
556 }
557
558 void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
559 Array<RecordComponent*>* record_components) {
560 if (record_components != NULL && !record_components->is_shared()) {
561 for (int i = 0; i < record_components->length(); i++) {
562 RecordComponent* record_component = record_components->at(i);
563 MetadataFactory::free_metadata(loader_data, record_component);
564 }
565 MetadataFactory::free_array<RecordComponent*>(loader_data, record_components);
566 }
567 }
568
569 // This function deallocates the metadata and C heap pointers that the
570 // InstanceKlass points to.
571 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
572
573 // Orphan the mirror first, CMS thinks it's still live.
574 if (java_mirror() != NULL) {
575 java_lang_Class::set_klass(java_mirror(), NULL);
576 }
577
578 // Also remove mirror from handles
579 loader_data->remove_handle(_java_mirror);
580
581 // Need to take this class off the class loader data list.
582 loader_data->remove_class(this);
583
584 // The array_klass for this class is created later, after error handling.
585 // For class redefinition, we keep the original class so this scratch class
586 // doesn't have an array class. Either way, assert that there is nothing
587 // to deallocate.
588 assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");
589
590 // Release C heap allocated data that this points to, which includes
591 // reference counting symbol names.
592 // Can't release the constant pool here because the constant pool can be
593 // deallocated separately from the InstanceKlass for default methods and
594 // redefine classes.
595 release_C_heap_structures(/* release_constant_pool */ false);
596
597 deallocate_methods(loader_data, methods());
598 set_methods(NULL);
599
600 deallocate_record_components(loader_data, record_components());
601 set_record_components(NULL);
602
603 if (method_ordering() != NULL &&
604 method_ordering() != Universe::the_empty_int_array() &&
605 !method_ordering()->is_shared()) {
606 MetadataFactory::free_array<int>(loader_data, method_ordering());
607 }
608 set_method_ordering(NULL);
609
610 // default methods can be empty
611 if (default_methods() != NULL &&
612 default_methods() != Universe::the_empty_method_array() &&
613 !default_methods()->is_shared()) {
614 MetadataFactory::free_array<Method*>(loader_data, default_methods());
615 }
616 // Do NOT deallocate the default methods, they are owned by superinterfaces.
617 set_default_methods(NULL);
618
619 // default methods vtable indices can be empty
620 if (default_vtable_indices() != NULL &&
621 !default_vtable_indices()->is_shared()) {
622 MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
623 }
624 set_default_vtable_indices(NULL);
625
626
627 // This array is in Klass, but remove it with the InstanceKlass since
628 // this place would be the only caller and it can share memory with transitive
629 // interfaces.
630 if (secondary_supers() != NULL &&
631 secondary_supers() != Universe::the_empty_klass_array() &&
632 // see comments in compute_secondary_supers about the following cast
633 (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
634 !secondary_supers()->is_shared()) {
635 MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
636 }
637 set_secondary_supers(NULL);
638
639 deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
640 set_transitive_interfaces(NULL);
641 set_local_interfaces(NULL);
642
643 if (fields() != NULL && !fields()->is_shared()) {
644 MetadataFactory::free_array<jushort>(loader_data, fields());
645 }
646 set_fields(NULL, 0);
647
648 // If a method from a redefined class is using this constant pool, don't
649 // delete it, yet. The new class's previous version will point to this.
650 if (constants() != NULL) {
651 assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
652 if (!constants()->is_shared()) {
653 MetadataFactory::free_metadata(loader_data, constants());
654 }
655 // Delete any cached resolution errors for the constant pool
656 SystemDictionary::delete_resolution_error(constants());
657
658 set_constants(NULL);
659 }
660
661 if (inner_classes() != NULL &&
662 inner_classes() != Universe::the_empty_short_array() &&
663 !inner_classes()->is_shared()) {
664 MetadataFactory::free_array<jushort>(loader_data, inner_classes());
665 }
666 set_inner_classes(NULL);
667
668 if (nest_members() != NULL &&
669 nest_members() != Universe::the_empty_short_array() &&
670 !nest_members()->is_shared()) {
671 MetadataFactory::free_array<jushort>(loader_data, nest_members());
672 }
673 set_nest_members(NULL);
674
675 if (permitted_subclasses() != NULL &&
676 permitted_subclasses() != Universe::the_empty_short_array() &&
677 !permitted_subclasses()->is_shared()) {
678 MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
679 }
680 set_permitted_subclasses(NULL);
681
682 // We should deallocate the Annotations instance if it's not in shared spaces.
683 if (annotations() != NULL && !annotations()->is_shared()) {
684 MetadataFactory::free_metadata(loader_data, annotations());
685 }
686 set_annotations(NULL);
687
688 if (Arguments::is_dumping_archive()) {
689 SystemDictionaryShared::remove_dumptime_info(this);
690 }
691 }
692
693 bool InstanceKlass::is_record() const {
694 return _record_components != NULL &&
695 is_final() &&
696 java_super() == vmClasses::Record_klass();
697 }
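// For reference, a record class produced by javac satisfies all three tests above,
// e.g. (Java source, illustrative only):
//
//   record Point(int x, int y) { }  // ACC_FINAL, superclass java.lang.Record, Record attribute
//
// The Record attribute is what the class file parser turns into _record_components.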
698
699 bool InstanceKlass::is_sealed() const {
700 return _permitted_subclasses != NULL &&
701 _permitted_subclasses != Universe::the_empty_short_array();
702 }
703
704 bool InstanceKlass::should_be_initialized() const {
705 return !is_initialized();
706 }
707
708 klassItable InstanceKlass::itable() const {
709 return klassItable(const_cast<InstanceKlass*>(this));
710 }
711
712 void InstanceKlass::eager_initialize(Thread *thread) {
713 if (!EagerInitialization) return;
714
715 if (this->is_not_initialized()) {
716 // abort if the class has a class initializer
717 if (this->class_initializer() != NULL) return;
718
719 // abort if it is java.lang.Object (initialization is handled in genesis)
720 Klass* super_klass = super();
721 if (super_klass == NULL) return;
722
723 // abort if the super class should be initialized
724 if (!InstanceKlass::cast(super_klass)->is_initialized()) return;
725
726 // call body to expose the this pointer
727 eager_initialize_impl();
728 }
729 }
730
731 // JVMTI spec thinks there are signers and protection domain in the
732 // instanceKlass. These accessors pretend these fields are there.
733 // The hprof specification also thinks these fields are in InstanceKlass.
734 oop InstanceKlass::protection_domain() const {
735 // return the protection_domain from the mirror
736 return java_lang_Class::protection_domain(java_mirror());
737 }
738
739 // Removing these requires an incompatible change and a CCC request.
740 objArrayOop InstanceKlass::signers() const {
741 // return the signers from the mirror
742 return java_lang_Class::signers(java_mirror());
743 }
744
745 oop InstanceKlass::init_lock() const {
746 // return the init lock from the mirror
747 oop lock = java_lang_Class::init_lock(java_mirror());
748 // Prevent reordering with any access of initialization state
749 OrderAccess::loadload();
750 assert(lock != NULL || !is_not_initialized(), // initialized or in_error state
751 "only fully initialized state can have a null lock");
752 return lock;
753 }
754
755 // Set the initialization lock to null so the object can be GC'ed. Any threads
756 // racing to get this lock will see a null lock and will not lock.
757 // That's okay because they all check for initialized state after getting
758 // the lock and return.
759 void InstanceKlass::fence_and_clear_init_lock() {
760 // make sure previous stores are all done, notably the init_state.
761 OrderAccess::storestore();
762 java_lang_Class::clear_init_lock(java_mirror());
763 assert(!is_not_initialized(), "class must be initialized now");
764 }
765
766 void InstanceKlass::eager_initialize_impl() {
767 EXCEPTION_MARK;
768 HandleMark hm(THREAD);
769 Handle h_init_lock(THREAD, init_lock());
770 ObjectLocker ol(h_init_lock, THREAD);
771
772 // abort if someone beat us to the initialization
773 if (!is_not_initialized()) return; // note: not equivalent to is_initialized()
774
775 ClassState old_state = init_state();
776 link_class_impl(THREAD);
777 if (HAS_PENDING_EXCEPTION) {
778 CLEAR_PENDING_EXCEPTION;
779 // Abort if linking the class throws an exception.
780
781 // Use a test to avoid redundantly resetting the state if there's
782 // no change. Set_init_state() asserts that state changes make
783 // progress, whereas here we might just be spinning in place.
784 if (old_state != _init_state)
785 set_init_state(old_state);
786 } else {
787 // linking successful, mark class as initialized
788 set_init_state(fully_initialized);
789 fence_and_clear_init_lock();
790 // trace
791 if (log_is_enabled(Info, class, init)) {
792 ResourceMark rm(THREAD);
793 log_info(class, init)("[Initialized %s without side effects]", external_name());
794 }
795 }
796 }
797
798
799 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
800 // process. The step comments refer to the procedure described in that section.
801 // Note: implementation moved to static method to expose the this pointer.
802 void InstanceKlass::initialize(TRAPS) {
803 if (this->should_be_initialized()) {
804 initialize_impl(CHECK);
805 // Note: at this point the class may be initialized
806 // OR it may be in the state of being initialized
807 // in case of recursive initialization!
808 } else {
809 assert(is_initialized(), "sanity check");
810 }
811 }
812
813
814 bool InstanceKlass::verify_code(TRAPS) {
815 // 1) Verify the bytecodes
816 return Verifier::verify(this, should_verify_class(), THREAD);
817 }
818
819 void InstanceKlass::link_class(TRAPS) {
820 assert(is_loaded(), "must be loaded");
821 if (!is_linked()) {
822 link_class_impl(CHECK);
823 }
824 }
825
826 // Called to verify that a class can link during initialization, without
827 // throwing a VerifyError.
828 bool InstanceKlass::link_class_or_fail(TRAPS) {
829 assert(is_loaded(), "must be loaded");
830 if (!is_linked()) {
831 link_class_impl(CHECK_false);
832 }
833 return is_linked();
834 }
835
836 bool InstanceKlass::link_class_impl(TRAPS) {
837 if (DumpSharedSpaces && SystemDictionaryShared::has_class_failed_verification(this)) {
838 // This is for CDS dumping phase only -- we use the in_error_state to indicate that
839 // the class has failed verification. Throwing the NoClassDefFoundError here is just
840 // a convenient way to stop repeat attempts to verify the same (bad) class.
841 //
842 // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
843 // if we are executing Java code. This is not a problem for CDS dumping phase since
844 // it doesn't execute any Java code.
845 ResourceMark rm(THREAD);
846 Exceptions::fthrow(THREAD_AND_LOCATION,
847 vmSymbols::java_lang_NoClassDefFoundError(),
848 "Class %s, or one of its supertypes, failed class initialization",
849 external_name());
850 return false;
851 }
852 // return if already verified
853 if (is_linked()) {
854 return true;
855 }
856
857 // Timing
858 // timer handles recursion
859 JavaThread* jt = THREAD;
860
861 // link super class before linking this class
862 Klass* super_klass = super();
863 if (super_klass != NULL) {
864 if (super_klass->is_interface()) { // check if super class is an interface
865 ResourceMark rm(THREAD);
866 Exceptions::fthrow(
867 THREAD_AND_LOCATION,
868 vmSymbols::java_lang_IncompatibleClassChangeError(),
869 "class %s has interface %s as super class",
870 external_name(),
871 super_klass->external_name()
872 );
873 return false;
874 }
875
876 InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
877 ik_super->link_class_impl(CHECK_false);
878 }
879
880 // link all interfaces implemented by this class before linking this class
881 Array<InstanceKlass*>* interfaces = local_interfaces();
882 int num_interfaces = interfaces->length();
883 for (int index = 0; index < num_interfaces; index++) {
884 InstanceKlass* interk = interfaces->at(index);
885 interk->link_class_impl(CHECK_false);
886 }
887
888 // in case the class is linked in the process of linking its superclasses
889 if (is_linked()) {
890 return true;
891 }
892
893 // trace only the link time for this klass that includes
894 // the verification time
895 PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
896 ClassLoader::perf_class_link_selftime(),
897 ClassLoader::perf_classes_linked(),
898 jt->get_thread_stat()->perf_recursion_counts_addr(),
899 jt->get_thread_stat()->perf_timers_addr(),
900 PerfClassTraceTime::CLASS_LINK);
901
902 // verification & rewriting
903 {
904 HandleMark hm(THREAD);
905 Handle h_init_lock(THREAD, init_lock());
906 ObjectLocker ol(h_init_lock, jt);
907 // rewritten will have been set if loader constraint error found
908 // on an earlier link attempt
909 // don't verify or rewrite if already rewritten
910 //
911
912 if (!is_linked()) {
913 if (!is_rewritten()) {
914 {
915 bool verify_ok = verify_code(THREAD);
916 if (!verify_ok) {
917 return false;
918 }
919 }
920
921 // Just in case a side-effect of verify linked this class already
922 // (which can sometimes happen since the verifier loads classes
923 // using custom class loaders, which are free to initialize things)
924 if (is_linked()) {
925 return true;
926 }
927
928 // also sets rewritten
929 rewrite_class(CHECK_false);
930 } else if (is_shared()) {
931 SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
932 }
933
934 // relocate jsrs and link methods after they are all rewritten
935 link_methods(CHECK_false);
936
937 // Initialize the vtable and interface table after
938 // methods have been rewritten since rewrite may
939 // fabricate new Method*s.
940 // also does loader constraint checking
941 //
942 // initialize_vtable and initialize_itable need to be rerun
943 // for a shared class if
944 // 1) the class is loaded by custom class loader or
945 // 2) the class is loaded by built-in class loader but failed to add archived loader constraints
946 bool need_init_table = true;
947 if (is_shared() && SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
948 need_init_table = false;
949 }
950 if (need_init_table) {
951 vtable().initialize_vtable_and_check_constraints(CHECK_false);
952 itable().initialize_itable_and_check_constraints(CHECK_false);
953 }
954 #ifdef ASSERT
955 vtable().verify(tty, true);
956 // In case itable verification is ever added.
957 // itable().verify(tty, true);
958 #endif
959 if (UseVtableBasedCHA) {
960 MutexLocker ml(THREAD, Compile_lock);
961 set_init_state(linked);
962
963 // Now flush all code that assumes the class is not linked.
964 if (Universe::is_fully_initialized()) {
965 CodeCache::flush_dependents_on(this);
966 }
967 } else {
968 set_init_state(linked);
969 }
970 if (JvmtiExport::should_post_class_prepare()) {
971 JvmtiExport::post_class_prepare(THREAD, this);
972 }
973 }
974 }
975 return true;
976 }
977
978 // Rewrite the byte codes of all of the methods of a class.
979 // The rewriter must be called exactly once. Rewriting must happen after
980 // verification but before the first method of the class is executed.
981 void InstanceKlass::rewrite_class(TRAPS) {
982 assert(is_loaded(), "must be loaded");
983 if (is_rewritten()) {
984 assert(is_shared(), "rewriting an unshared class?");
985 return;
986 }
987 Rewriter::rewrite(this, CHECK);
988 set_rewritten();
989 }
990
991 // Now relocate and link method entry points after class is rewritten.
992 // This is outside the is_rewritten flag. In case of an exception, it can be
993 // executed more than once.
994 void InstanceKlass::link_methods(TRAPS) {
995 int len = methods()->length();
996 for (int i = len-1; i >= 0; i--) {
997 methodHandle m(THREAD, methods()->at(i));
998
999 // Set up method entry points for compiler and interpreter.
1000 m->link_method(m, CHECK);
1001 }
1002 }
1003
1004 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
1005 void InstanceKlass::initialize_super_interfaces(TRAPS) {
1006 assert (has_nonstatic_concrete_methods(), "caller should have checked this");
1007 for (int i = 0; i < local_interfaces()->length(); ++i) {
1008 InstanceKlass* ik = local_interfaces()->at(i);
1009
1010 // Initialization is a depth-first search, i.e. we start at the top of the inheritance tree.
1011 // has_nonstatic_concrete_methods drives the search of superinterfaces since being set
1012 // means a non-static concrete method exists somewhere in the superinterface hierarchy.
1013 if (ik->has_nonstatic_concrete_methods()) {
1014 ik->initialize_super_interfaces(CHECK);
1015 }
1016
1017 // Only initialize() interfaces that "declare" concrete methods.
1018 if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
1019 ik->initialize(CHECK);
1020 }
1021 }
1022 }
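// A small illustration (Java source, hypothetical names) of the rule implemented above:
// initializing a class initializes only those superinterfaces that declare (or inherit
// declarations of) non-static concrete methods, per JLS 12.4.1.
//
//   interface WithDefault   { default void m() { } }
//   interface ConstantsOnly { int Y = 42; }
//   class C implements WithDefault, ConstantsOnly { }
//
// Initializing C triggers initialization of WithDefault (it declares a default method) but
// not of ConstantsOnly.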
1023
1024 ResourceHashtable<const InstanceKlass*, OopHandle,
1025 primitive_hash<const InstanceKlass*>,
1026 primitive_equals<const InstanceKlass*>,
1027 107,
1028 ResourceObj::C_HEAP,
1029 mtClass>
1030 _initialization_error_table;
1031
1032 void InstanceKlass::add_initialization_error(JavaThread* current, Handle exception) {
1033 // Create the same exception with a message indicating the thread name,
1034 // and the StackTraceElements.
1035 // If the initialization error is OOM, this might not work, but if GC kicks in
1036 // this would still be helpful.
1037 JavaThread* THREAD = current;
1038 Handle init_error = java_lang_Throwable::create_initialization_error(current, exception);
1039 ResourceMark rm(THREAD);
1040 if (init_error.is_null()) {
1041 log_trace(class, init)("Initialization error is null for class %s", external_name());
1042 return;
1043 }
1044
1045 MutexLocker ml(THREAD, ClassInitError_lock);
1046 OopHandle elem = OopHandle(Universe::vm_global(), init_error());
1047 bool created;
1048 _initialization_error_table.put_if_absent(this, elem, &created);
1049 assert(created, "Initialization is single threaded");
1050 log_trace(class, init)("Initialization error added for class %s", external_name());
1051 }
1052
1053 oop InstanceKlass::get_initialization_error(JavaThread* current) {
1054 MutexLocker ml(current, ClassInitError_lock);
1055 OopHandle* h = _initialization_error_table.get(this);
1056 return (h != nullptr) ? h->resolve() : nullptr;
1057 }
1058
1059 // Need to remove entries for unloaded classes.
1060 void InstanceKlass::clean_initialization_error_table() {
1061 struct InitErrorTableCleaner {
1062 bool do_entry(const InstanceKlass* ik, OopHandle h) {
1063 if (!ik->is_loader_alive()) {
1064 h.release(Universe::vm_global());
1065 return true;
1066 } else {
1067 return false;
1068 }
1069 }
1070 };
1071
1072 MutexLocker ml(ClassInitError_lock);
1073 InitErrorTableCleaner cleaner;
1074 _initialization_error_table.unlink(&cleaner);
1075 }
1076
1077 void InstanceKlass::initialize_impl(TRAPS) {
1078 HandleMark hm(THREAD);
1079
1080 // Make sure klass is linked (verified) before initialization
1081 // A class could already be verified, since it has been reflected upon.
1082 link_class(CHECK);
1083
1084 DTRACE_CLASSINIT_PROBE(required, -1);
1085
1086 bool wait = false;
1087
1088 JavaThread* jt = THREAD;
1089
1090 // refer to the JVM book page 47 for description of steps
1091 // Step 1
1092 {
1093 Handle h_init_lock(THREAD, init_lock());
1094 ObjectLocker ol(h_init_lock, jt);
1095
1096 // Step 2
1097 // If we were to use wait() instead of waitInterruptibly() then
1098 // we might end up throwing IE from link/symbol resolution sites
1099 // that aren't expected to throw. This would wreak havoc. See 6320309.
1100 while (is_being_initialized() && !is_reentrant_initialization(jt)) {
1101 wait = true;
1102 jt->set_class_to_be_initialized(this);
1103 ol.wait_uninterruptibly(jt);
1104 jt->set_class_to_be_initialized(NULL);
1105 }
1106
1107 // Step 3
1108 if (is_being_initialized() && is_reentrant_initialization(jt)) {
1109 DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
1110 return;
1111 }
1112
1113 // Step 4
1114 if (is_initialized()) {
1115 DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
1116 return;
1117 }
1118
1119 // Step 5
1120 if (is_in_error_state()) {
1121 DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
1122 ResourceMark rm(THREAD);
1123 Handle cause(THREAD, get_initialization_error(THREAD));
1124
1125 stringStream ss;
1126 ss.print("Could not initialize class %s", external_name());
1127 if (cause.is_null()) {
1128 THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), ss.as_string());
1129 } else {
1130 THROW_MSG_CAUSE(vmSymbols::java_lang_NoClassDefFoundError(),
1131 ss.as_string(), cause);
1132 }
1133 }
1134
1135 // Step 6
1136 set_init_state(being_initialized);
1137 set_init_thread(jt);
1138 }
1139
1140 // Step 7
1141 // Next, if C is a class rather than an interface, initialize its super class and super
1142 // interfaces.
1143 if (!is_interface()) {
1144 Klass* super_klass = super();
1145 if (super_klass != NULL && super_klass->should_be_initialized()) {
1146 super_klass->initialize(THREAD);
1147 }
1148 // If C implements any interface that declares a non-static, concrete method,
1149 // the initialization of C triggers initialization of its super interfaces.
1150 // Only need to recurse if has_nonstatic_concrete_methods, which covers both declaring
1151 // non-static concrete methods and having a superinterface that declares them
1152 if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
1153 initialize_super_interfaces(THREAD);
1154 }
1155
1156 // If any exceptions, complete abruptly, throwing the same exception as above.
1157 if (HAS_PENDING_EXCEPTION) {
1158 Handle e(THREAD, PENDING_EXCEPTION);
1159 CLEAR_PENDING_EXCEPTION;
1160 {
1161 EXCEPTION_MARK;
1162 add_initialization_error(THREAD, e);
1163 // Lock object, set state, and notify all waiting threads
1164 set_initialization_state_and_notify(initialization_error, THREAD);
1165 CLEAR_PENDING_EXCEPTION;
1166 }
1167 DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1168 THROW_OOP(e());
1169 }
1170 }
1171
1172
1173 // Step 8
1174 {
1175 DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1176 if (class_initializer() != NULL) {
1177 // Timer includes any side effects of class initialization (resolution,
1178 // etc), but not recursive entry into call_class_initializer().
1179 PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1180 ClassLoader::perf_class_init_selftime(),
1181 ClassLoader::perf_classes_inited(),
1182 jt->get_thread_stat()->perf_recursion_counts_addr(),
1183 jt->get_thread_stat()->perf_timers_addr(),
1184 PerfClassTraceTime::CLASS_CLINIT);
1185 call_class_initializer(THREAD);
1186 } else {
1187 // The elapsed time is so small it's not worth counting.
1188 if (UsePerfData) {
1189 ClassLoader::perf_classes_inited()->inc();
1190 }
1191 call_class_initializer(THREAD);
1192 }
1193 }
1194
1195 // Step 9
1196 if (!HAS_PENDING_EXCEPTION) {
1197 set_initialization_state_and_notify(fully_initialized, CHECK);
1198 debug_only(vtable().verify(tty, true);)
1199 }
1200 else {
1201 // Step 10 and 11
1202 Handle e(THREAD, PENDING_EXCEPTION);
1203 CLEAR_PENDING_EXCEPTION;
1204 // JVMTI has already reported the pending exception
1205 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1206 JvmtiExport::clear_detected_exception(jt);
1207 {
1208 EXCEPTION_MARK;
1209 add_initialization_error(THREAD, e);
1210 set_initialization_state_and_notify(initialization_error, THREAD);
1211 CLEAR_PENDING_EXCEPTION; // ignore any exception thrown, class initialization error is thrown below
1212 // JVMTI has already reported the pending exception
1213 // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1214 JvmtiExport::clear_detected_exception(jt);
1215 }
1216 DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1217 if (e->is_a(vmClasses::Error_klass())) {
1218 THROW_OOP(e());
1219 } else {
1220 JavaCallArguments args(e);
1221 THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
1222 vmSymbols::throwable_void_signature(),
1223 &args);
1224 }
1225 }
1226 DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1227 }
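// Observable Java-level effect of the error paths above, for illustration (hypothetical class):
//
//   class Broken { static int v = Integer.parseInt("oops"); }  // <clinit> throws NumberFormatException
//
// The first attempt to use Broken throws ExceptionInInitializerError wrapping the
// NumberFormatException (Steps 10 and 11); the class is left in the erroneous state, so any
// later attempt throws NoClassDefFoundError "Could not initialize class Broken" with the
// recorded cause attached (Step 5).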
1228
1229
1230 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
1231 Handle h_init_lock(THREAD, init_lock());
1232 if (h_init_lock() != NULL) {
1233 ObjectLocker ol(h_init_lock, THREAD);
1234 set_init_thread(NULL); // reset _init_thread before changing _init_state
1235 set_init_state(state);
1236 fence_and_clear_init_lock();
1237 ol.notify_all(CHECK);
1238 } else {
1239 assert(h_init_lock() != NULL, "The initialization state should never be set twice");
1240 set_init_thread(NULL); // reset _init_thread before changing _init_state
1241 set_init_state(state);
1242 }
1243 }
1244
1245 InstanceKlass* InstanceKlass::implementor() const {
1246 InstanceKlass* volatile* ik = adr_implementor();
1247 if (ik == NULL) {
1248 return NULL;
1249 } else {
1250 // This load races with inserts, and therefore needs acquire.
1251 InstanceKlass* ikls = Atomic::load_acquire(ik);
1252 if (ikls != NULL && !ikls->is_loader_alive()) {
1253 return NULL; // don't return unloaded class
1254 } else {
1255 return ikls;
1256 }
1257 }
1258 }
1259
1260
1261 void InstanceKlass::set_implementor(InstanceKlass* ik) {
1262 assert_locked_or_safepoint(Compile_lock);
1263 assert(is_interface(), "not interface");
1264 InstanceKlass* volatile* addr = adr_implementor();
1265 assert(addr != NULL, "null addr");
1266 if (addr != NULL) {
1267 Atomic::release_store(addr, ik);
1268 }
1269 }
1270
1271 int InstanceKlass::nof_implementors() const {
1272 InstanceKlass* ik = implementor();
1273 if (ik == NULL) {
1274 return 0;
1275 } else if (ik != this) {
1276 return 1;
1277 } else {
1278 return 2;
1279 }
1280 }
1281
1282 // The embedded _implementor field can only record one implementor.
1283 // When there are more than one implementors, the _implementor field
1284 // is set to the interface Klass* itself. Following are the possible
1285 // values for the _implementor field:
1286 // NULL - no implementor
1287 // implementor Klass* - one implementor
1288 // self - more than one implementor
1289 //
1290 // The _implementor field only exists for interfaces.
1291 void InstanceKlass::add_implementor(InstanceKlass* ik) {
1292 if (Universe::is_fully_initialized()) {
1293 assert_lock_strong(Compile_lock);
1294 }
1295 assert(is_interface(), "not interface");
1296 // Filter out my subinterfaces.
1297 // (Note: Interfaces are never on the subklass list.)
1298 if (ik->is_interface()) return;
1299
1300 // Filter out subclasses whose supers already implement me.
1301 // (Note: CHA must walk subclasses of direct implementors
1302 // in order to locate indirect implementors.)
1303 InstanceKlass* super_ik = ik->java_super();
1304 if (super_ik != NULL && super_ik->implements_interface(this))
1305 // We only need to check one immediate superclass, since the
1306 // implements_interface query looks at transitive_interfaces.
1307 // Any supers of the super have the same (or fewer) transitive_interfaces.
1308 return;
1309
1310 InstanceKlass* iklass = implementor();
1311 if (iklass == NULL) {
1312 set_implementor(ik);
1313 } else if (iklass != this && iklass != ik) {
1314 // There is already an implementor. Use itself as an indicator of
1315 // more than one implementors.
1316 set_implementor(this);
1317 }
1318
1319 // The implementor also implements the transitive_interfaces
1320 for (int index = 0; index < local_interfaces()->length(); index++) {
1321 local_interfaces()->at(index)->add_implementor(ik);
1322 }
1323 }
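// Illustration (Java source, hypothetical names) of the encoding maintained above:
//
//   interface I { }
//   class A implements I { }   // after loading A: I's _implementor == A
//   class B implements I { }   // after loading B: I's _implementor == I itself ("many")
//
// Compiler CHA can treat a single implementor as the unique receiver type for calls through
// I, guarded by a dependency that is invalidated when a second implementor appears.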
1324
1325 void InstanceKlass::init_implementor() {
1326 if (is_interface()) {
1327 set_implementor(NULL);
1328 }
1329 }
1330
1331
1332 void InstanceKlass::process_interfaces() {
1333 // link this class into the implementors list of every interface it implements
1334 for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1335 assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1336 InstanceKlass* interf = local_interfaces()->at(i);
1337 assert(interf->is_interface(), "expected interface");
1338 interf->add_implementor(this);
1339 }
1340 }
1341
1342 bool InstanceKlass::can_be_primary_super_slow() const {
1343 if (is_interface())
1344 return false;
1345 else
1346 return Klass::can_be_primary_super_slow();
1347 }
1348
1349 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1350 Array<InstanceKlass*>* transitive_interfaces) {
1351 // The secondaries are the implemented interfaces.
1352 Array<InstanceKlass*>* interfaces = transitive_interfaces;
1353 int num_secondaries = num_extra_slots + interfaces->length();
1354 if (num_secondaries == 0) {
1355 // Must share this for correct bootstrapping!
1356 set_secondary_supers(Universe::the_empty_klass_array());
1357 return NULL;
1358 } else if (num_extra_slots == 0) {
1359 // The secondary super list is exactly the same as the transitive interfaces, so
1360 // let's use it instead of making a copy.
1361 // Redefine classes has to be careful not to delete this!
1362 // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1363 // (but it's safe to do here because we won't write into _secondary_supers from this point on).
1364 set_secondary_supers((Array<Klass*>*)(address)interfaces);
1365 return NULL;
1366 } else {
1367 // Copy transitive interfaces to a temporary growable array to be constructed
1368 // into the secondary super list with extra slots.
1369 GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1370 for (int i = 0; i < interfaces->length(); i++) {
1371 secondaries->push(interfaces->at(i));
1372 }
1373 return secondaries;
1374 }
1375 }
1376
1377 bool InstanceKlass::implements_interface(Klass* k) const {
1378 if (this == k) return true;
1379 assert(k->is_interface(), "should be an interface class");
1380 for (int i = 0; i < transitive_interfaces()->length(); i++) {
1381 if (transitive_interfaces()->at(i) == k) {
1382 return true;
1383 }
1384 }
1385 return false;
1386 }
1387
1388 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1389 // Verify direct super interface
1390 if (this == k) return true;
1391 assert(k->is_interface(), "should be an interface class");
1392 for (int i = 0; i < local_interfaces()->length(); i++) {
1393 if (local_interfaces()->at(i) == k) {
1394 return true;
1395 }
1396 }
1397 return false;
1398 }
1399
1400 objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
1401 check_array_allocation_length(length, arrayOopDesc::max_array_length(T_OBJECT), CHECK_NULL);
1402 int size = objArrayOopDesc::object_size(length);
1403 Klass* ak = array_klass(n, CHECK_NULL);
1404 objArrayOop o = (objArrayOop)Universe::heap()->array_allocate(ak, size, length,
1405 /* do_zero */ true, CHECK_NULL);
1406 return o;
1407 }
1408
1409 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
1410 if (TraceFinalizerRegistration) {
1411 tty->print("Registered ");
1412 i->print_value_on(tty);
1413 tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
1414 }
1415 instanceHandle h_i(THREAD, i);
1416 // Pass the handle as argument, JavaCalls::call expects oops as jobjects
1417 JavaValue result(T_VOID);
1418 JavaCallArguments args(h_i);
1419 methodHandle mh (THREAD, Universe::finalizer_register_method());
1420 JavaCalls::call(&result, mh, &args, CHECK_NULL);
1421 return h_i();
1422 }
1423
1424 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1425 bool has_finalizer_flag = has_finalizer(); // Query before possible GC
1426 int size = size_helper(); // Query before forming handle.
1427
1428 instanceOop i;
1429
1430 i = (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1431 if (has_finalizer_flag && !RegisterFinalizersAtInit) {
1432 i = register_finalizer(i, CHECK_NULL);
1433 }
1434 return i;
1435 }
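// For illustration: a class that overrides java.lang.Object::finalize() has has_finalizer()
// set, so with -XX:-RegisterFinalizersAtInit an allocation such as
//
//   new Resource();   // Resource (hypothetical) declares protected void finalize()
//
// is routed through register_finalizer() above before the new instance is returned; with the
// default +RegisterFinalizersAtInit, registration happens on return from Object.<init> instead.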
1436
1437 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1438 return instanceHandle(THREAD, allocate_instance(THREAD));
1439 }
1440
1441 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1442 if (is_interface() || is_abstract()) {
1443 ResourceMark rm(THREAD);
1444 THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1445 : vmSymbols::java_lang_InstantiationException(), external_name());
1446 }
1447 if (this == vmClasses::Class_klass()) {
1448 ResourceMark rm(THREAD);
1449 THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1450 : vmSymbols::java_lang_IllegalAccessException(), external_name());
1451 }
1452 }
1453
1454 Klass* InstanceKlass::array_klass(int n, TRAPS) {
1455 // Need load-acquire for lock-free read
1456 if (array_klasses_acquire() == NULL) {
1457 ResourceMark rm(THREAD);
1458 JavaThread *jt = THREAD;
1459 {
1460 // Atomic creation of array_klasses
1461 MutexLocker ma(THREAD, MultiArray_lock);
1462
1463 // Check if update has already taken place
1464 if (array_klasses() == NULL) {
1465 ObjArrayKlass* k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
1466 // use 'release' to pair with lock-free load
1467 release_set_array_klasses(k);
1468 }
1469 }
1470 }
1471 // array_klasses() will always be set at this point
1472 ObjArrayKlass* oak = array_klasses();
1473 return oak->array_klass(n, THREAD);
1474 }
1475
1476 Klass* InstanceKlass::array_klass_or_null(int n) {
1477 // Need load-acquire for lock-free read
1478 ObjArrayKlass* oak = array_klasses_acquire();
1479 if (oak == NULL) {
1480 return NULL;
1481 } else {
1482 return oak->array_klass_or_null(n);
1483 }
1484 }
1485
1486 Klass* InstanceKlass::array_klass(TRAPS) {
1487 return array_klass(1, THREAD);
1488 }
1489
1490 Klass* InstanceKlass::array_klass_or_null() {
1491 return array_klass_or_null(1);
1492 }
1493
1494 static int call_class_initializer_counter = 0; // for debugging
1495
1496 Method* InstanceKlass::class_initializer() const {
1497 Method* clinit = find_method(
1498 vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1499 if (clinit != NULL && clinit->has_valid_initializer_flags()) {
1500 return clinit;
1501 }
1502 return NULL;
1503 }
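// For reference (Java source, illustrative): static initializer blocks and non-constant static
// field initializers are compiled into the single "<clinit>()V" method looked up above, e.g.
//
//   class Config {
//     static final java.util.Map<String, String> DEFAULTS = new java.util.HashMap<>();
//     static { DEFAULTS.put("mode", "safe"); }
//   }
//
// has_valid_initializer_flags() additionally requires ACC_STATIC on <clinit> for class file
// versions 51.0 and above, per JVMS 2.9.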
1504
1505 void InstanceKlass::call_class_initializer(TRAPS) {
1506 if (ReplayCompiles &&
1507 (ReplaySuppressInitializers == 1 ||
1508 (ReplaySuppressInitializers >= 2 && class_loader() != NULL))) {
1509 // Hide the existence of the initializer for the purpose of replaying the compile
1510 return;
1511 }
1512
1513 methodHandle h_method(THREAD, class_initializer());
1514 assert(!is_initialized(), "we cannot initialize twice");
1515 LogTarget(Info, class, init) lt;
1516 if (lt.is_enabled()) {
1517 ResourceMark rm(THREAD);
1518 LogStream ls(lt);
1519 ls.print("%d Initializing ", call_class_initializer_counter++);
1520 name()->print_value_on(&ls);
1521 ls.print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this));
1522 }
1523 if (h_method() != NULL) {
1524 JavaCallArguments args; // No arguments
1525 JavaValue result(T_VOID);
1526 JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1527 }
1528 }
1529
1530
1531 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1532 InterpreterOopMap* entry_for) {
1533 // Lazily create the _oop_map_cache at first request
1534 // Lock-free access requires load_acquire.
1535 OopMapCache* oop_map_cache = Atomic::load_acquire(&_oop_map_cache);
1536 if (oop_map_cache == NULL) {
1537 MutexLocker x(OopMapCacheAlloc_lock);
1538 // Check if _oop_map_cache was allocated while we were waiting for this lock
1539 if ((oop_map_cache = _oop_map_cache) == NULL) {
1540 oop_map_cache = new OopMapCache();
1541 // Ensure _oop_map_cache is stable, since it is examined without a lock
1542 Atomic::release_store(&_oop_map_cache, oop_map_cache);
1543 }
1544 }
1545 // _oop_map_cache is constant after init; lookup below does its own locking.
1546 oop_map_cache->lookup(method, bci, entry_for);
1547 }
1548
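// True if this class or one of its superclasses declares a nonstatic field
// at the given offset.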
1549 bool InstanceKlass::contains_field_offset(int offset) {
1550 fieldDescriptor fd;
1551 return find_field_from_offset(offset, false, &fd);
1552 }
1553
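// Find a field declared by this class (not inherited) with the given name and
// signature and fill in *fd; returns false if no such field exists.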
1554 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1555 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1556 Symbol* f_name = fs.name();
1557 Symbol* f_sig = fs.signature();
1558 if (f_name == name && f_sig == sig) {
1559 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1560 return true;
1561 }
1562 }
1563 return false;
1564 }
1565
1566
1567 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1568 const int n = local_interfaces()->length();
1569 for (int i = 0; i < n; i++) {
1570 Klass* intf1 = local_interfaces()->at(i);
1571 assert(intf1->is_interface(), "just checking type");
1572 // search for field in current interface
1573 if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
1574 assert(fd->is_static(), "interface field must be static");
1575 return intf1;
1576 }
1577 // search for field in direct superinterfaces
1578 Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
1579 if (intf2 != NULL) return intf2;
1580 }
1581 // otherwise field lookup fails
1582 return NULL;
1583 }
1584
1585
1586 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1587 // search order according to newest JVM spec (5.4.3.2, p.167).
1588 // 1) search for field in current klass
1589 if (find_local_field(name, sig, fd)) {
1590 return const_cast<InstanceKlass*>(this);
1591 }
1592 // 2) search for field recursively in direct superinterfaces
1593 { Klass* intf = find_interface_field(name, sig, fd);
1594 if (intf != NULL) return intf;
1595 }
1596 // 3) apply field lookup recursively if superclass exists
1597 { Klass* supr = super();
1598 if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
1599 }
1600 // 4) otherwise field lookup fails
1601 return NULL;
1602 }
1603
1604
1605 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1606 // search order according to newest JVM spec (5.4.3.2, p.167).
1607 // 1) search for field in current klass
1608 if (find_local_field(name, sig, fd)) {
1609 if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1610 }
1611 // 2) search for field recursively in direct superinterfaces
1612 if (is_static) {
1613 Klass* intf = find_interface_field(name, sig, fd);
1614 if (intf != NULL) return intf;
1615 }
1616 // 3) apply field lookup recursively if superclass exists
1617 { Klass* supr = super();
1618 if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
1619 }
1620 // 4) otherwise field lookup fails
1621 return NULL;
1622 }
1623
1624
1625 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1626 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1627 if (fs.offset() == offset) {
1628 fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1629 if (fd->is_static() == is_static) return true;
1630 }
1631 }
1632 return false;
1633 }
1634
1635
1636 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1637 Klass* klass = const_cast<InstanceKlass*>(this);
1638 while (klass != NULL) {
1639 if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
1640 return true;
1641 }
1642 klass = klass->super();
1643 }
1644 return false;
1645 }
1646
1647
1648 void InstanceKlass::methods_do(void f(Method* method)) {
1649 // Methods aren't stable until they are loaded. This can be read outside
1650 // a lock through the ClassLoaderData for profiling.
1651 // Redefined scratch classes are on the list and need to be cleaned.
1652 if (!is_loaded() && !is_scratch_class()) {
1653 return;
1654 }
1655
1656 int len = methods()->length();
1657 for (int index = 0; index < len; index++) {
1658 Method* m = methods()->at(index);
1659 assert(m->is_method(), "must be method");
1660 f(m);
1661 }
1662 }
1663
1664
1665 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1666 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1667 if (fs.access_flags().is_static()) {
1668 fieldDescriptor& fd = fs.field_descriptor();
1669 cl->do_field(&fd);
1670 }
1671 }
1672 }
1673
1674
1675 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
1676 for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1677 if (fs.access_flags().is_static()) {
1678 fieldDescriptor& fd = fs.field_descriptor();
1679 f(&fd, mirror, CHECK);
1680 }
1681 }
1682 }
1683
1684
1685 static int compare_fields_by_offset(int* a, int* b) {
1686 return a[0] - b[0];
1687 }
1688
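// Apply the closure to every nonstatic field, visiting superclass fields first
// and each class's own fields in ascending offset order.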
1689 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1690 InstanceKlass* super = superklass();
1691 if (super != NULL) {
1692 super->do_nonstatic_fields(cl);
1693 }
1694 fieldDescriptor fd;
1695 int length = java_fields_count();
1696 // In DebugInfo nonstatic fields are sorted by offset.
1697 int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
1698 int j = 0;
1699 for (int i = 0; i < length; i += 1) {
1700 fd.reinitialize(this, i);
1701 if (!fd.is_static()) {
1702 fields_sorted[j + 0] = fd.offset();
1703 fields_sorted[j + 1] = i;
1704 j += 2;
1705 }
1706 }
1707 if (j > 0) {
1708 length = j;
1709 // _sort_Fn is defined in growableArray.hpp.
1710 qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
1711 for (int i = 0; i < length; i += 2) {
1712 fd.reinitialize(this, fields_sorted[i + 1]);
1713 assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
1714 cl->do_field(&fd);
1715 }
1716 }
1717 FREE_C_HEAP_ARRAY(int, fields_sorted);
1718 }
1719
1720
1721 void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
1722 if (array_klasses() != NULL)
1723 array_klasses()->array_klasses_do(f, THREAD);
1724 }
1725
1726 void InstanceKlass::array_klasses_do(void f(Klass* k)) {
1727 if (array_klasses() != NULL)
1728 array_klasses()->array_klasses_do(f);
1729 }
1730
1731 #ifdef ASSERT
1732 static int linear_search(const Array<Method*>* methods,
1733 const Symbol* name,
1734 const Symbol* signature) {
1735 const int len = methods->length();
1736 for (int index = 0; index < len; index++) {
1737 const Method* const m = methods->at(index);
1738 assert(m->is_method(), "must be method");
1739 if (m->signature() == signature && m->name() == name) {
1740 return index;
1741 }
1742 }
1743 return -1;
1744 }
1745 #endif
1746
1747 bool InstanceKlass::_disable_method_binary_search = false;
1748
1749 NOINLINE int linear_search(const Array<Method*>* methods, const Symbol* name) {
1750 int len = methods->length();
1751 int l = 0;
1752 int h = len - 1;
1753 while (l <= h) {
1754 Method* m = methods->at(l);
1755 if (m->name() == name) {
1756 return l;
1757 }
1758 l++;
1759 }
1760 return -1;
1761 }
1762
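// Binary search for a method with the given name; the methods array is sorted by
// the address of the name Symbol. Returns the index of a matching method or -1.
// Falls back to a linear scan when binary search is disabled during dynamic CDS dumping.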
1763 inline int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
1764 if (_disable_method_binary_search) {
1765 assert(DynamicDumpSharedSpaces, "must be");
1766 // At the final stage of dynamic dumping, the methods array may not be sorted
1767 // by ascending addresses of their names, so we can't use binary search anymore.
1768 // However, methods with the same name are still laid out consecutively inside the
1769 // methods array, so let's look for the first one that matches.
1770 return linear_search(methods, name);
1771 }
1772
1773 int len = methods->length();
1774 int l = 0;
1775 int h = len - 1;
1776
1777 // methods are sorted by ascending addresses of their names, so do binary search
1778 while (l <= h) {
1779 int mid = (l + h) >> 1;
1780 Method* m = methods->at(mid);
1781 assert(m->is_method(), "must be method");
1782 int res = m->name()->fast_compare(name);
1783 if (res == 0) {
1784 return mid;
1785 } else if (res < 0) {
1786 l = mid + 1;
1787 } else {
1788 h = mid - 1;
1789 }
1790 }
1791 return -1;
1792 }
1793
1794 // find_method looks up the name/signature in the local methods array
1795 Method* InstanceKlass::find_method(const Symbol* name,
1796 const Symbol* signature) const {
1797 return find_method_impl(name, signature,
1798 OverpassLookupMode::find,
1799 StaticLookupMode::find,
1800 PrivateLookupMode::find);
1801 }
1802
1803 Method* InstanceKlass::find_method_impl(const Symbol* name,
1804 const Symbol* signature,
1805 OverpassLookupMode overpass_mode,
1806 StaticLookupMode static_mode,
1807 PrivateLookupMode private_mode) const {
1808 return InstanceKlass::find_method_impl(methods(),
1809 name,
1810 signature,
1811 overpass_mode,
1812 static_mode,
1813 private_mode);
1814 }
1815
1816 // find_instance_method looks up the name/signature in the local methods array
1817 // and skips over static methods
1818 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
1819 const Symbol* name,
1820 const Symbol* signature,
1821 PrivateLookupMode private_mode) {
1822 Method* const meth = InstanceKlass::find_method_impl(methods,
1823 name,
1824 signature,
1825 OverpassLookupMode::find,
1826 StaticLookupMode::skip,
1827 private_mode);
1828 assert(((meth == NULL) || !meth->is_static()),
1829 "find_instance_method should have skipped statics");
1830 return meth;
1831 }
1832
1833 // find_instance_method looks up the name/signature in the local methods array
1834 // and skips over static methods
1835 Method* InstanceKlass::find_instance_method(const Symbol* name,
1836 const Symbol* signature,
1837 PrivateLookupMode private_mode) const {
1838 return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
1839 }
1840
1841 // Find looks up the name/signature in the local methods array
1842 // and filters on the overpass, static and private flags
1843 // This returns the first one found
1844 // note that the local methods array can have up to one overpass, one static
1845 // and one instance (private or not) with the same name/signature
1846 Method* InstanceKlass::find_local_method(const Symbol* name,
1847 const Symbol* signature,
1848 OverpassLookupMode overpass_mode,
1849 StaticLookupMode static_mode,
1850 PrivateLookupMode private_mode) const {
1851 return InstanceKlass::find_method_impl(methods(),
1852 name,
1853 signature,
1854 overpass_mode,
1855 static_mode,
1856 private_mode);
1857 }
1858
1859 // Find looks up the name/signature in the local methods array
1860 // and filters on the overpass, static and private flags
1861 // This returns the first one found
1862 // note that the local methods array can have up to one overpass, one static
1863 // and one instance (private or not) with the same name/signature
1864 Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
1865 const Symbol* name,
1866 const Symbol* signature,
1867 OverpassLookupMode overpass_mode,
1868 StaticLookupMode static_mode,
1869 PrivateLookupMode private_mode) {
1870 return InstanceKlass::find_method_impl(methods,
1871 name,
1872 signature,
1873 overpass_mode,
1874 static_mode,
1875 private_mode);
1876 }
1877
1878 Method* InstanceKlass::find_method(const Array<Method*>* methods,
1879 const Symbol* name,
1880 const Symbol* signature) {
1881 return InstanceKlass::find_method_impl(methods,
1882 name,
1883 signature,
1884 OverpassLookupMode::find,
1885 StaticLookupMode::find,
1886 PrivateLookupMode::find);
1887 }
1888
1889 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
1890 const Symbol* name,
1891 const Symbol* signature,
1892 OverpassLookupMode overpass_mode,
1893 StaticLookupMode static_mode,
1894 PrivateLookupMode private_mode) {
1895 int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
1896 return hit >= 0 ? methods->at(hit): NULL;
1897 }
1898
1899 // true if method matches signature and conforms to skipping_X conditions.
1900 static bool method_matches(const Method* m,
1901 const Symbol* signature,
1902 bool skipping_overpass,
1903 bool skipping_static,
1904 bool skipping_private) {
1905 return ((m->signature() == signature) &&
1906 (!skipping_overpass || !m->is_overpass()) &&
1907 (!skipping_static || !m->is_static()) &&
1908 (!skipping_private || !m->is_private()));
1909 }
1910
1911 // Used directly for default_methods to find the index into the
1912 // default_vtable_indices, and indirectly by find_method
1913 // find_method_index looks in the local methods array to return the index
1914 // of the matching name/signature. If overpass methods are being ignored,
1915 // the search continues to find a potential non-overpass match. This capability
1916 // is important during method resolution to prefer a static method, for example,
1917 // over an overpass method.
1918 // There is the possibility in any _methods array to have the same name/signature
1919 // for a static method, an overpass method and a local instance method.
1920 // To correctly catch a given method, the search criteria may need
1921 // to explicitly skip the other two. For local instance methods, it
1922 // is often necessary to skip private methods.
1923 int InstanceKlass::find_method_index(const Array<Method*>* methods,
1924 const Symbol* name,
1925 const Symbol* signature,
1926 OverpassLookupMode overpass_mode,
1927 StaticLookupMode static_mode,
1928 PrivateLookupMode private_mode) {
1929 const bool skipping_overpass = (overpass_mode == OverpassLookupMode::skip);
1930 const bool skipping_static = (static_mode == StaticLookupMode::skip);
1931 const bool skipping_private = (private_mode == PrivateLookupMode::skip);
1932 const int hit = quick_search(methods, name);
1933 if (hit != -1) {
1934 const Method* const m = methods->at(hit);
1935
1936 // Do linear search to find matching signature. First, quick check
1937 // for common case, ignoring overpasses if requested.
1938 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1939 return hit;
1940 }
1941
1942 // search downwards through overloaded methods
1943 int i;
1944 for (i = hit - 1; i >= 0; --i) {
1945 const Method* const m = methods->at(i);
1946 assert(m->is_method(), "must be method");
1947 if (m->name() != name) {
1948 break;
1949 }
1950 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1951 return i;
1952 }
1953 }
1954 // search upwards
1955 for (i = hit + 1; i < methods->length(); ++i) {
1956 const Method* const m = methods->at(i);
1957 assert(m->is_method(), "must be method");
1958 if (m->name() != name) {
1959 break;
1960 }
1961 if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1962 return i;
1963 }
1964 }
1965 // not found
1966 #ifdef ASSERT
1967 const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
1968 linear_search(methods, name, signature);
1969 assert(-1 == index, "binary search should have found entry %d", index);
1970 #endif
1971 }
1972 return -1;
1973 }
1974
1975 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
1976 return find_method_by_name(methods(), name, end);
1977 }
1978
1979 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
1980 const Symbol* name,
1981 int* end_ptr) {
1982 assert(end_ptr != NULL, "just checking");
1983 int start = quick_search(methods, name);
1984 int end = start + 1;
1985 if (start != -1) {
1986 while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
1987 while (end < methods->length() && (methods->at(end))->name() == name) ++end;
1988 *end_ptr = end;
1989 return start;
1990 }
1991 return -1;
1992 }
1993
1994 // uncached_lookup_method searches both the local class methods array and all
1995 // superclasses methods arrays, skipping any overpass methods in superclasses,
1996 // and possibly skipping private methods.
1997 Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
1998 const Symbol* signature,
1999 OverpassLookupMode overpass_mode,
2000 PrivateLookupMode private_mode) const {
2001 OverpassLookupMode overpass_local_mode = overpass_mode;
2002 const Klass* klass = this;
2003 while (klass != NULL) {
2004 Method* const method = InstanceKlass::cast(klass)->find_method_impl(name,
2005 signature,
2006 overpass_local_mode,
2007 StaticLookupMode::find,
2008 private_mode);
2009 if (method != NULL) {
2010 return method;
2011 }
2012 klass = klass->super();
2013 overpass_local_mode = OverpassLookupMode::skip; // Always ignore overpass methods in superclasses
2014 }
2015 return NULL;
2016 }
2017
2018 #ifdef ASSERT
2019 // search through class hierarchy and return true if this class or
2020 // one of the superclasses was redefined
2021 bool InstanceKlass::has_redefined_this_or_super() const {
2022 const Klass* klass = this;
2023 while (klass != NULL) {
2024 if (InstanceKlass::cast(klass)->has_been_redefined()) {
2025 return true;
2026 }
2027 klass = klass->super();
2028 }
2029 return false;
2030 }
2031 #endif
2032
2033 // lookup a method in the default methods list then in all transitive interfaces
2034 // Do NOT return private or static methods
2035 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
2036 Symbol* signature) const {
2037 Method* m = NULL;
2038 if (default_methods() != NULL) {
2039 m = find_method(default_methods(), name, signature);
2040 }
2041 // Look up interfaces
2042 if (m == NULL) {
2043 m = lookup_method_in_all_interfaces(name, signature, DefaultsLookupMode::find);
2044 }
2045 return m;
2046 }
2047
2048 // lookup a method in all the interfaces that this class implements
2049 // Do NOT return private or static methods (new in JDK 8), which are not externally visible.
2050 // They should only be found in the initial InterfaceMethodRef.
2051 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
2052 Symbol* signature,
2053 DefaultsLookupMode defaults_mode) const {
2054 Array<InstanceKlass*>* all_ifs = transitive_interfaces();
2055 int num_ifs = all_ifs->length();
2056 InstanceKlass *ik = NULL;
2057 for (int i = 0; i < num_ifs; i++) {
2058 ik = all_ifs->at(i);
2059 Method* m = ik->lookup_method(name, signature);
2060 if (m != NULL && m->is_public() && !m->is_static() &&
2061 ((defaults_mode != DefaultsLookupMode::skip) || !m->is_default_method())) {
2062 return m;
2063 }
2064 }
2065 return NULL;
2066 }
2067
2068 /* jni_id_for is used for jfieldIDs only */
2069 JNIid* InstanceKlass::jni_id_for(int offset) {
2070 MutexLocker ml(JfieldIdCreation_lock);
2071 JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
2072 if (probe == NULL) {
2073 // Allocate new static field identifier
2074 probe = new JNIid(this, offset, jni_ids());
2075 set_jni_ids(probe);
2076 }
2077 return probe;
2078 }
2079
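// The EnclosingMethod attribute, when present, is stored as the last two u2 slots of
// the _inner_classes array; return the slot at 'offset', or 0 when the attribute is absent.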
2080 u2 InstanceKlass::enclosing_method_data(int offset) const {
2081 const Array<jushort>* const inner_class_list = inner_classes();
2082 if (inner_class_list == NULL) {
2083 return 0;
2084 }
2085 const int length = inner_class_list->length();
2086 if (length % inner_class_next_offset == 0) {
2087 return 0;
2088 }
2089 const int index = length - enclosing_method_attribute_size;
2090 assert(offset < enclosing_method_attribute_size, "invalid offset");
2091 return inner_class_list->at(index + offset);
2092 }
2093
2094 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
2095 u2 method_index) {
2096 Array<jushort>* inner_class_list = inner_classes();
2097 assert (inner_class_list != NULL, "_inner_classes list is not set up");
2098 int length = inner_class_list->length();
2099 if (length % inner_class_next_offset == enclosing_method_attribute_size) {
2100 int index = length - enclosing_method_attribute_size;
2101 inner_class_list->at_put(
2102 index + enclosing_method_class_index_offset, class_index);
2103 inner_class_list->at_put(
2104 index + enclosing_method_method_index_offset, method_index);
2105 }
2106 }
2107
2108 // Lookup or create a jmethodID.
2109 // This code is called by the VMThread and JavaThreads so the
2110 // locking has to be done very carefully to avoid deadlocks
2111 // and/or other cache consistency problems.
2112 //
2113 jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) {
2114 size_t idnum = (size_t)method_h->method_idnum();
2115 jmethodID* jmeths = methods_jmethod_ids_acquire();
2116 size_t length = 0;
2117 jmethodID id = NULL;
2118
2119 // We use a double-check locking idiom here because this cache is
2120 // performance sensitive. In the normal system, this cache only
2121 // transitions from NULL to non-NULL which is safe because we use
2122 // release_set_methods_jmethod_ids() to advertise the new cache.
2123 // A partially constructed cache should never be seen by a racing
2124 // thread. We also use release_store() to save a new jmethodID
2125 // in the cache so a partially constructed jmethodID should never be
2126 // seen either. Cache reads of existing jmethodIDs proceed without a
2127 // lock, but cache writes of a new jmethodID require uniqueness and
2128 // creation of the cache itself requires no leaks so a lock is
2129 // generally acquired in those two cases.
2130 //
2131 // If the RedefineClasses() API has been used, then this cache can
2132 // grow and we'll have transitions from non-NULL to bigger non-NULL.
2133 // Cache creation requires no leaks and we require safety between all
2134 // cache accesses and freeing of the old cache so a lock is generally
2135 // acquired when the RedefineClasses() API has been used.
2136
2137 if (jmeths != NULL) {
2138 // the cache already exists
2139 if (!idnum_can_increment()) {
2140 // the cache can't grow so we can just get the current values
2141 get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2142 } else {
2143 // cache can grow so we have to be more careful
2144 if (Threads::number_of_threads() == 0 ||
2145 SafepointSynchronize::is_at_safepoint()) {
2146 // we're single threaded or at a safepoint - no locking needed
2147 get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2148 } else {
2149 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2150 get_jmethod_id_length_value(jmeths, idnum, &length, &id);
2151 }
2152 }
2153 }
2154 // implied else:
2155 // we need to allocate a cache so default length and id values are good
2156
2157 if (jmeths == NULL || // no cache yet
2158 length <= idnum || // cache is too short
2159 id == NULL) { // cache doesn't contain entry
2160
2161 // This function can be called by the VMThread so we have to do all
2162 // things that might block on a safepoint before grabbing the lock.
2163 // Otherwise, we can deadlock with the VMThread or have a cache
2164 // consistency issue. These vars keep track of what we might have
2165 // to free after the lock is dropped.
2166 jmethodID to_dealloc_id = NULL;
2167 jmethodID* to_dealloc_jmeths = NULL;
2168
2169 // may not allocate new_jmeths or use it if we allocate it
2170 jmethodID* new_jmeths = NULL;
2171 if (length <= idnum) {
2172 // allocate a new cache that might be used
2173 size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
2174 new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
2175 memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
2176 // cache size is stored in element[0], other elements offset by one
2177 new_jmeths[0] = (jmethodID)size;
2178 }
2179
2180 // allocate a new jmethodID that might be used
2181 jmethodID new_id = NULL;
2182 if (method_h->is_old() && !method_h->is_obsolete()) {
2183 // The method passed in is old (but not obsolete), we need to use the current version
2184 Method* current_method = method_with_idnum((int)idnum);
2185 assert(current_method != NULL, "old and but not obsolete, so should exist");
2186 new_id = Method::make_jmethod_id(class_loader_data(), current_method);
2187 } else {
2188 // It is the current version of the method or an obsolete method,
2189 // use the version passed in
2190 new_id = Method::make_jmethod_id(class_loader_data(), method_h());
2191 }
2192
2193 if (Threads::number_of_threads() == 0 ||
2194 SafepointSynchronize::is_at_safepoint()) {
2195 // we're single threaded or at a safepoint - no locking needed
2196 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2197 &to_dealloc_id, &to_dealloc_jmeths);
2198 } else {
2199 MutexLocker ml(JmethodIdCreation_lock, Mutex::_no_safepoint_check_flag);
2200 id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2201 &to_dealloc_id, &to_dealloc_jmeths);
2202 }
2203
2204 // The lock has been dropped so we can free resources.
2205 // Free up either the old cache or the new cache if we allocated one.
2206 if (to_dealloc_jmeths != NULL) {
2207 FreeHeap(to_dealloc_jmeths);
2208 }
2209 // free up the new ID since it wasn't needed
2210 if (to_dealloc_id != NULL) {
2211 Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id);
2212 }
2213 }
2214 return id;
2215 }
2216
2217 // Figure out how many jmethodIDs haven't been allocated, and make
2218 // sure space for them is pre-allocated. This makes getting all
2219 // method ids much, much faster for classes with more than 8
2220 // methods, and has a *substantial* effect on performance with jvmti
2221 // code that loads all jmethodIDs for all classes.
2222 void InstanceKlass::ensure_space_for_methodids(int start_offset) {
2223 int new_jmeths = 0;
2224 int length = methods()->length();
2225 for (int index = start_offset; index < length; index++) {
2226 Method* m = methods()->at(index);
2227 jmethodID id = m->find_jmethod_id_or_null();
2228 if (id == NULL) {
2229 new_jmeths++;
2230 }
2231 }
2232 if (new_jmeths != 0) {
2233 Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
2234 }
2235 }
2236
2237 // Common code to fetch the jmethodID from the cache or update the
2238 // cache with the new jmethodID. This function should never do anything
2239 // that causes the caller to go to a safepoint or we can deadlock with
2240 // the VMThread or have cache consistency issues.
2241 //
2242 jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
2243 size_t idnum, jmethodID new_id,
2244 jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
2245 jmethodID** to_dealloc_jmeths_p) {
2246 assert(new_id != NULL, "sanity check");
2247 assert(to_dealloc_id_p != NULL, "sanity check");
2248 assert(to_dealloc_jmeths_p != NULL, "sanity check");
2249 assert(Threads::number_of_threads() == 0 ||
2250 SafepointSynchronize::is_at_safepoint() ||
2251 JmethodIdCreation_lock->owned_by_self(), "sanity check");
2252
2253 // reacquire the cache - we are locked, single threaded or at a safepoint
2254 jmethodID* jmeths = methods_jmethod_ids_acquire();
2255 jmethodID id = NULL;
2256 size_t length = 0;
2257
2258 if (jmeths == NULL || // no cache yet
2259 (length = (size_t)jmeths[0]) <= idnum) { // cache is too short
2260 if (jmeths != NULL) {
2261 // copy any existing entries from the old cache
2262 for (size_t index = 0; index < length; index++) {
2263 new_jmeths[index+1] = jmeths[index+1];
2264 }
2265 *to_dealloc_jmeths_p = jmeths; // save old cache for later delete
2266 }
2267 release_set_methods_jmethod_ids(jmeths = new_jmeths);
2268 } else {
2269 // fetch jmethodID (if any) from the existing cache
2270 id = jmeths[idnum+1];
2271 *to_dealloc_jmeths_p = new_jmeths; // save new cache for later delete
2272 }
2273 if (id == NULL) {
2274 // No matching jmethodID in the existing cache or we have a new
2275 // cache or we just grew the cache. This cache write is done here
2276 // by the first thread to win the foot race because a jmethodID
2277 // needs to be unique once it is generally available.
2278 id = new_id;
2279
2280 // The jmethodID cache can be read while unlocked so we have to
2281 // make sure the new jmethodID is complete before installing it
2282 // in the cache.
2283 Atomic::release_store(&jmeths[idnum+1], id);
2284 } else {
2285 *to_dealloc_id_p = new_id; // save new id for later delete
2286 }
2287 return id;
2288 }
2289
2290
2291 // Common code to get the jmethodID cache length and the jmethodID
2292 // value at index idnum if there is one.
2293 //
2294 void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
2295 size_t idnum, size_t *length_p, jmethodID* id_p) {
2296 assert(cache != NULL, "sanity check");
2297 assert(length_p != NULL, "sanity check");
2298 assert(id_p != NULL, "sanity check");
2299
2300 // cache size is stored in element[0], other elements offset by one
2301 *length_p = (size_t)cache[0];
2302 if (*length_p <= idnum) { // cache is too short
2303 *id_p = NULL;
2304 } else {
2305 *id_p = cache[idnum+1]; // fetch jmethodID (if any)
2306 }
2307 }
2308
2309
2310 // Lookup a jmethodID, NULL if not found. Do no blocking, no allocations, no handles
2311 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2312 size_t idnum = (size_t)method->method_idnum();
2313 jmethodID* jmeths = methods_jmethod_ids_acquire();
2314 size_t length; // length assigned as debugging crumb
2315 jmethodID id = NULL;
2316 if (jmeths != NULL && // If there is a cache
2317 (length = (size_t)jmeths[0]) > idnum) { // and if it is long enough,
2318 id = jmeths[idnum+1]; // Look up the id (may be NULL)
2319 }
2320 return id;
2321 }
2322
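// Return a DependencyContext view over _dep_context so dependent nmethods can be
// added, removed, marked and cleaned.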
2323 inline DependencyContext InstanceKlass::dependencies() {
2324 DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2325 return dep_context;
2326 }
2327
2328 int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) {
2329 return dependencies().mark_dependent_nmethods(changes);
2330 }
2331
2332 void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2333 dependencies().add_dependent_nmethod(nm);
2334 }
2335
2336 void InstanceKlass::remove_dependent_nmethod(nmethod* nm) {
2337 dependencies().remove_dependent_nmethod(nm);
2338 }
2339
2340 void InstanceKlass::clean_dependency_context() {
2341 dependencies().clean_unloading_dependents();
2342 }
2343
2344 #ifndef PRODUCT
2345 void InstanceKlass::print_dependent_nmethods(bool verbose) {
2346 dependencies().print_dependent_nmethods(verbose);
2347 }
2348
2349 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2350 return dependencies().is_dependent_nmethod(nm);
2351 }
2352 #endif //PRODUCT
2353
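// Clean weak metadata links: drop an implementor that is no longer loader-alive
// and purge stale entries from each method's MethodData.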
2354 void InstanceKlass::clean_weak_instanceklass_links() {
2355 clean_implementors_list();
2356 clean_method_data();
2357 }
2358
2359 void InstanceKlass::clean_implementors_list() {
2360 assert(is_loader_alive(), "this klass should be live");
2361 if (is_interface()) {
2362 assert (ClassUnloading, "only called for ClassUnloading");
2363 for (;;) {
2364 // Use load_acquire due to competing with inserts
2365 InstanceKlass* impl = Atomic::load_acquire(adr_implementor());
2366 if (impl != NULL && !impl->is_loader_alive()) {
2367 // NULL this field, might be an unloaded instance klass or NULL
2368 InstanceKlass* volatile* iklass = adr_implementor();
2369 if (Atomic::cmpxchg(iklass, impl, (InstanceKlass*)NULL) == impl) {
2370 // Successfully unlinking implementor.
2371 if (log_is_enabled(Trace, class, unload)) {
2372 ResourceMark rm;
2373 log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2374 }
2375 return;
2376 }
2377 } else {
2378 return;
2379 }
2380 }
2381 }
2382 }
2383
2384 void InstanceKlass::clean_method_data() {
2385 for (int m = 0; m < methods()->length(); m++) {
2386 MethodData* mdo = methods()->at(m)->method_data();
2387 if (mdo != NULL) {
2388 MutexLocker ml(SafepointSynchronize::is_at_safepoint() ? NULL : mdo->extra_data_lock());
2389 mdo->clean_method_data(/*always_clean*/false);
2390 }
2391 }
2392 }
2393
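// Visit all metaspace pointers embedded in this InstanceKlass (constants, methods,
// interfaces, itable entries, etc.) so a MetaspaceClosure, e.g. during CDS archive
// dumping, can mark or relocate them.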
2394 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2395 Klass::metaspace_pointers_do(it);
2396
2397 if (log_is_enabled(Trace, cds)) {
2398 ResourceMark rm;
2399 log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name());
2400 }
2401
2402 it->push(&_annotations);
2403 it->push((Klass**)&_array_klasses);
2404 if (!is_rewritten()) {
2405 it->push(&_constants, MetaspaceClosure::_writable);
2406 } else {
2407 it->push(&_constants);
2408 }
2409 it->push(&_inner_classes);
2410 #if INCLUDE_JVMTI
2411 it->push(&_previous_versions);
2412 #endif
2413 it->push(&_methods);
2414 it->push(&_default_methods);
2415 it->push(&_local_interfaces);
2416 it->push(&_transitive_interfaces);
2417 it->push(&_method_ordering);
2418 if (!is_rewritten()) {
2419 it->push(&_default_vtable_indices, MetaspaceClosure::_writable);
2420 } else {
2421 it->push(&_default_vtable_indices);
2422 }
2423
2424 // _fields might be written into by Rewriter::scan_method() -> fd.set_has_initialized_final_update()
2425 it->push(&_fields, MetaspaceClosure::_writable);
2426
2427 if (itable_length() > 0) {
2428 itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2429 int method_table_offset_in_words = ioe->offset()/wordSize;
2430 int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
2431 / itableOffsetEntry::size();
2432
2433 for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2434 if (ioe->interface_klass() != NULL) {
2435 it->push(ioe->interface_klass_addr());
2436 itableMethodEntry* ime = ioe->first_method_entry(this);
2437 int n = klassItable::method_count_for_interface(ioe->interface_klass());
2438 for (int index = 0; index < n; index ++) {
2439 it->push(ime[index].method_addr());
2440 }
2441 }
2442 }
2443 }
2444
2445 it->push(&_nest_members);
2446 it->push(&_permitted_subclasses);
2447 it->push(&_record_components);
2448 }
2449
2450 void InstanceKlass::remove_unshareable_info() {
2451
2452 if (can_be_verified_at_dumptime()) {
2453 // Remember this so we can avoid walking the hierarchy at runtime.
2454 set_verified_at_dump_time();
2455 }
2456
2457 Klass::remove_unshareable_info();
2458
2459 if (SystemDictionaryShared::has_class_failed_verification(this)) {
2460 // Linking of classes is attempted during dumping and may fail,
2461 // but these classes are still in the dictionary and class list in the CLD.
2462 // If the class has failed verification, there is nothing else to remove.
2463 return;
2464 }
2465
2466 // Reset to the 'allocated' state to prevent any premature access to
2467 // a shared class at runtime while the class is still being loaded and
2468 // restored. A class' init_state is set to 'loaded' at runtime when it's
2469 // being added to the class hierarchy (see SystemDictionary::add_to_hierarchy()).
2470 _init_state = allocated;
2471
2472 { // Otherwise this needs to take out the Compile_lock.
2473 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2474 init_implementor();
2475 }
2476
2477 constants()->remove_unshareable_info();
2478
2479 for (int i = 0; i < methods()->length(); i++) {
2480 Method* m = methods()->at(i);
2481 m->remove_unshareable_info();
2482 }
2483
2484 // do array classes also.
2485 if (array_klasses() != NULL) {
2486 array_klasses()->remove_unshareable_info();
2487 }
2488
2489 // These are not allocated from metaspace. They are safe to set to NULL.
2490 _source_debug_extension = NULL;
2491 _dep_context = NULL;
2492 _osr_nmethods_head = NULL;
2493 #if INCLUDE_JVMTI
2494 _breakpoints = NULL;
2495 _previous_versions = NULL;
2496 _cached_class_file = NULL;
2497 _jvmti_cached_class_field_map = NULL;
2498 #endif
2499
2500 _init_thread = NULL;
2501 _methods_jmethod_ids = NULL;
2502 _jni_ids = NULL;
2503 _oop_map_cache = NULL;
2504 // clear _nest_host to ensure re-load at runtime
2505 _nest_host = NULL;
2506 init_shared_package_entry();
2507 _dep_context_last_cleaned = 0;
2508 }
2509
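// In addition to the base Klass mirror, also clear the mirrors of this klass's
// array classes.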
2510 void InstanceKlass::remove_java_mirror() {
2511 Klass::remove_java_mirror();
2512
2513 // do array classes also.
2514 if (array_klasses() != NULL) {
2515 array_klasses()->remove_java_mirror();
2516 }
2517 }
2518
2519 void InstanceKlass::init_shared_package_entry() {
2520 #if !INCLUDE_CDS_JAVA_HEAP
2521 _package_entry = NULL;
2522 #else
2523 if (!MetaspaceShared::use_full_module_graph()) {
2524 _package_entry = NULL;
2525 } else if (DynamicDumpSharedSpaces) {
2526 if (!MetaspaceShared::is_in_shared_metaspace(_package_entry)) {
2527 _package_entry = NULL;
2528 }
2529 } else {
2530 if (is_shared_unregistered_class()) {
2531 _package_entry = NULL;
2532 } else {
2533 _package_entry = PackageEntry::get_archived_entry(_package_entry);
2534 }
2535 }
2536 ArchivePtrMarker::mark_pointer((address**)&_package_entry);
2537 #endif
2538 }
2539
2540 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain,
2541 PackageEntry* pkg_entry, TRAPS) {
2542 // SystemDictionary::add_to_hierarchy() sets the init_state to loaded
2543 // before the InstanceKlass is added to the SystemDictionary. Make
2544 // sure the current state is <loaded.
2545 assert(!is_loaded(), "invalid init state");
2546 assert(!shared_loading_failed(), "Must not try to load failed class again");
2547 set_package(loader_data, pkg_entry, CHECK);
2548 Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
2549
2550 Array<Method*>* methods = this->methods();
2551 int num_methods = methods->length();
2552 for (int index = 0; index < num_methods; ++index) {
2553 methods->at(index)->restore_unshareable_info(CHECK);
2554 }
2555 #if INCLUDE_JVMTI
2556 if (JvmtiExport::has_redefined_a_class()) {
2557 // Reinitialize vtable because RedefineClasses may have changed some
2558 // entries in this vtable for super classes so the CDS vtable might
2559 // point to old or obsolete entries. RedefineClasses doesn't fix up
2560 // vtables in the shared system dictionary, only the main one.
2561 // It also redefines the itable too so fix that too.
2562 // First fix any default methods that point to a super class that may
2563 // have been redefined.
2564 bool trace_name_printed = false;
2565 adjust_default_methods(&trace_name_printed);
2566 vtable().initialize_vtable();
2567 itable().initialize_itable();
2568 }
2569 #endif
2570
2571 // restore constant pool resolved references
2572 constants()->restore_unshareable_info(CHECK);
2573
2574 if (array_klasses() != NULL) {
2575 // To get a consistent list of classes we need MultiArray_lock to ensure
2576 // array classes aren't observed while they are being restored.
2577 MutexLocker ml(MultiArray_lock);
2578 // Array classes have null protection domain.
2579 // --> see ArrayKlass::complete_create_array_klass()
2580 array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
2581 }
2582
2583 // Initialize current biased locking state.
2584 if (UseBiasedLocking && BiasedLocking::enabled()) {
2585 set_prototype_header(markWord::biased_locking_prototype());
2586 }
2587
2588 // Initialize @ValueBased class annotation
2589 if (DiagnoseSyncOnValueBasedClasses && has_value_based_class_annotation()) {
2590 set_is_value_based();
2591 markWord prototype = markWord::prototype();
2592 #ifdef _LP64
2593 if (UseCompactObjectHeaders) {
2594 prototype = prototype.set_klass(this);
2595 }
2596 #endif
2597 set_prototype_header(prototype);
2598 }
2599 }
2600
2601 // Check if a class or any of its supertypes has a version older than 50.
2602 // CDS will not perform verification of old classes during dump time because
2603 // without changing the old verifier, the verification constraint cannot be
2604 // retrieved during dump time.
2605 // Verification of archived old classes will be performed during run time.
2606 bool InstanceKlass::can_be_verified_at_dumptime() const {
2607 if (major_version() < 50 /*JAVA_6_VERSION*/) {
2608 return false;
2609 }
2610 if (java_super() != NULL && !java_super()->can_be_verified_at_dumptime()) {
2611 return false;
2612 }
2613 Array<InstanceKlass*>* interfaces = local_interfaces();
2614 int len = interfaces->length();
2615 for (int i = 0; i < len; i++) {
2616 if (!interfaces->at(i)->can_be_verified_at_dumptime()) {
2617 return false;
2618 }
2619 }
2620 return true;
2621 }
2622
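// Record in _misc_flags which builtin loader (boot, platform or app) defined this
// shared class.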
2623 void InstanceKlass::set_shared_class_loader_type(s2 loader_type) {
2624 switch (loader_type) {
2625 case ClassLoader::BOOT_LOADER:
2626 _misc_flags |= _misc_is_shared_boot_class;
2627 break;
2628 case ClassLoader::PLATFORM_LOADER:
2629 _misc_flags |= _misc_is_shared_platform_class;
2630 break;
2631 case ClassLoader::APP_LOADER:
2632 _misc_flags |= _misc_is_shared_app_class;
2633 break;
2634 default:
2635 ShouldNotReachHere();
2636 break;
2637 }
2638 }
2639
2640 void InstanceKlass::assign_class_loader_type() {
2641 ClassLoaderData *cld = class_loader_data();
2642 if (cld->is_boot_class_loader_data()) {
2643 set_shared_class_loader_type(ClassLoader::BOOT_LOADER);
2644 }
2645 else if (cld->is_platform_class_loader_data()) {
2646 set_shared_class_loader_type(ClassLoader::PLATFORM_LOADER);
2647 }
2648 else if (cld->is_system_class_loader_data()) {
2649 set_shared_class_loader_type(ClassLoader::APP_LOADER);
2650 }
2651 }
2652
2653 #if INCLUDE_JVMTI
2654 static void clear_all_breakpoints(Method* m) {
2655 m->clear_all_breakpoints();
2656 }
2657 #endif
2658
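// Release the class's nmethod dependencies and notify interested subsystems
// (JVMTI, the class loading service, CDS dumping, JFR) that ik is being unloaded.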
2659 void InstanceKlass::unload_class(InstanceKlass* ik) {
2660 // Release dependencies.
2661 ik->dependencies().remove_all_dependents();
2662
2663 // notify the debugger
2664 if (JvmtiExport::should_post_class_unload()) {
2665 JvmtiExport::post_class_unload(ik);
2666 }
2667
2668 // notify ClassLoadingService of class unload
2669 ClassLoadingService::notify_class_unloaded(ik);
2670
2671 if (Arguments::is_dumping_archive()) {
2672 SystemDictionaryShared::remove_dumptime_info(ik);
2673 }
2674
2675 if (log_is_enabled(Info, class, unload)) {
2676 ResourceMark rm;
2677 log_info(class, unload)("unloading class %s " INTPTR_FORMAT, ik->external_name(), p2i(ik));
2678 }
2679
2680 Events::log_class_unloading(Thread::current(), ik);
2681
2682 #if INCLUDE_JFR
2683 assert(ik != NULL, "invariant");
2684 EventClassUnload event;
2685 event.set_unloadedClass(ik);
2686 event.set_definingClassLoader(ik->class_loader_data());
2687 event.commit();
2688 #endif
2689 }
2690
2691 static void method_release_C_heap_structures(Method* m) {
2692 m->release_C_heap_structures();
2693 }
2694
2695 // Called also by InstanceKlass::deallocate_contents, with false for release_constant_pool.
2696 void InstanceKlass::release_C_heap_structures(bool release_constant_pool) {
2697 // Clean up C heap
2698 Klass::release_C_heap_structures();
2699
2700 // Deallocate and call destructors for MDO mutexes
2701 methods_do(method_release_C_heap_structures);
2702
2703 // Deallocate oop map cache
2704 if (_oop_map_cache != NULL) {
2705 delete _oop_map_cache;
2706 _oop_map_cache = NULL;
2707 }
2708
2709 // Deallocate JNI identifiers for jfieldIDs
2710 JNIid::deallocate(jni_ids());
2711 set_jni_ids(NULL);
2712
2713 jmethodID* jmeths = methods_jmethod_ids_acquire();
2714 if (jmeths != (jmethodID*)NULL) {
2715 release_set_methods_jmethod_ids(NULL);
2716 FreeHeap(jmeths);
2717 }
2718
2719 assert(_dep_context == NULL,
2720 "dependencies should already be cleaned");
2721
2722 #if INCLUDE_JVMTI
2723 // Deallocate breakpoint records
2724 if (breakpoints() != 0x0) {
2725 methods_do(clear_all_breakpoints);
2726 assert(breakpoints() == 0x0, "should have cleared breakpoints");
2727 }
2728
2729 // deallocate the cached class file
2730 if (_cached_class_file != NULL) {
2731 os::free(_cached_class_file);
2732 _cached_class_file = NULL;
2733 }
2734 #endif
2735
2736 FREE_C_HEAP_ARRAY(char, _source_debug_extension);
2737
2738 if (release_constant_pool) {
2739 constants()->release_C_heap_structures();
2740 }
2741 }
2742
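// Store a NUL-terminated C-heap copy of the SourceDebugExtension attribute,
// or clear it when array is NULL.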
2743 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
2744 if (array == NULL) {
2745 _source_debug_extension = NULL;
2746 } else {
2747 // Adding one to the attribute length in order to store a null terminator
2748 // character could cause an overflow because the attribute length is
2749 // already coded with an u4 in the classfile, but in practice, it's
2750 // unlikely to happen.
2751 assert((length+1) > length, "Overflow checking");
2752 char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
2753 for (int i = 0; i < length; i++) {
2754 sde[i] = array[i];
2755 }
2756 sde[length] = '\0';
2757 _source_debug_extension = sde;
2758 }
2759 }
2760
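// Return the JVM type signature of this class, e.g. "Ljava/lang/String;", as a
// resource-allocated string; for hidden classes the last '+' in the name is
// replaced with '.'.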
2761 const char* InstanceKlass::signature_name() const {
2762 int hash_len = 0;
2763 char hash_buf[40];
2764
2765 // Get the internal name as a c string
2766 const char* src = (const char*) (name()->as_C_string());
2767 const int src_length = (int)strlen(src);
2768
2769 char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);
2770
2771 // Add L as type indicator
2772 int dest_index = 0;
2773 dest[dest_index++] = JVM_SIGNATURE_CLASS;
2774
2775 // Add the actual class name
2776 for (int src_index = 0; src_index < src_length; ) {
2777 dest[dest_index++] = src[src_index++];
2778 }
2779
2780 if (is_hidden()) { // Replace the last '+' with a '.'.
2781 for (int index = (int)src_length; index > 0; index--) {
2782 if (dest[index] == '+') {
2783 dest[index] = JVM_SIGNATURE_DOT;
2784 break;
2785 }
2786 }
2787 }
2788
2789 // If we have a hash, append it
2790 for (int hash_index = 0; hash_index < hash_len; ) {
2791 dest[dest_index++] = hash_buf[hash_index++];
2792 }
2793
2794 // Add the semicolon and the NULL
2795 dest[dest_index++] = JVM_SIGNATURE_ENDCLASS;
2796 dest[dest_index] = '\0';
2797 return dest;
2798 }
2799
2800 ModuleEntry* InstanceKlass::module() const {
2801 if (is_hidden() &&
2802 in_unnamed_package() &&
2803 class_loader_data()->has_class_mirror_holder()) {
2804 // For a non-strong hidden class defined to an unnamed package,
2805 // its (class held) CLD will not have an unnamed module created for it.
2806 // Two choices to find the correct ModuleEntry:
2807 // 1. If hidden class is within a nest, use nest host's module
2808 // 2. Find the unnamed module off from the class loader
2809 // For now option #2 is used since a nest host is not set until
2810 // after the instance class is created in jvm_lookup_define_class().
2811 if (class_loader_data()->is_boot_class_loader_data()) {
2812 return ClassLoaderData::the_null_class_loader_data()->unnamed_module();
2813 } else {
2814 oop module = java_lang_ClassLoader::unnamedModule(class_loader_data()->class_loader());
2815 assert(java_lang_Module::is_instance(module), "Not an instance of java.lang.Module");
2816 return java_lang_Module::module_entry(module);
2817 }
2818 }
2819
2820 // Class is in a named package
2821 if (!in_unnamed_package()) {
2822 return _package_entry->module();
2823 }
2824
2825 // Class is in an unnamed package, return its loader's unnamed module
2826 return class_loader_data()->unnamed_module();
2827 }
2828
2829 void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_entry, TRAPS) {
2830
2831 // Ensure that java/ packages are only loaded by the boot or platform builtin loaders.
2832 // This is not needed for shared classes since CDS does not archive prohibited classes.
2833 if (!is_shared()) {
2834 check_prohibited_package(name(), loader_data, CHECK);
2835 }
2836
2837 if (is_shared() && _package_entry != NULL) {
2838 if (MetaspaceShared::use_full_module_graph() && _package_entry == pkg_entry) {
2839 // we can use the saved package
2840 assert(MetaspaceShared::is_in_shared_metaspace(_package_entry), "must be");
2841 return;
2842 } else {
2843 _package_entry = NULL;
2844 }
2845 }
2846
2847 // ClassLoader::package_from_class_name has already incremented the refcount of the symbol
2848 // it returns, so we need to decrement it when the current function exits.
2849 TempNewSymbol from_class_name =
2850 (pkg_entry != NULL) ? NULL : ClassLoader::package_from_class_name(name());
2851
2852 Symbol* pkg_name;
2853 if (pkg_entry != NULL) {
2854 pkg_name = pkg_entry->name();
2855 } else {
2856 pkg_name = from_class_name;
2857 }
2858
2859 if (pkg_name != NULL && loader_data != NULL) {
2860
2861 // Find in class loader's package entry table.
2862 _package_entry = pkg_entry != NULL ? pkg_entry : loader_data->packages()->lookup_only(pkg_name);
2863
2864 // If the package name is not found in the loader's package
2865 // entry table, it is an indication that the package has not
2866 // been defined. Consider it defined within the unnamed module.
2867 if (_package_entry == NULL) {
2868
2869 if (!ModuleEntryTable::javabase_defined()) {
2870 // Before java.base is defined during bootstrapping, define all packages in
2871 // the java.base module. If a non-java.base package is erroneously placed
2872 // in the java.base module it will be caught later when java.base
2873 // is defined by ModuleEntryTable::verify_javabase_packages check.
2874 assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL");
2875 _package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry());
2876 } else {
2877 assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL");
2878 _package_entry = loader_data->packages()->lookup(pkg_name,
2879 loader_data->unnamed_module());
2880 }
2881
2882 // A package should have been successfully created
2883 DEBUG_ONLY(ResourceMark rm(THREAD));
2884 assert(_package_entry != NULL, "Package entry for class %s not found, loader %s",
2885 name()->as_C_string(), loader_data->loader_name_and_id());
2886 }
2887
2888 if (log_is_enabled(Trace, module)) {
2889 ResourceMark rm(THREAD);
2890 ModuleEntry* m = _package_entry->module();
2891 log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
2892 external_name(),
2893 pkg_name->as_C_string(),
2894 loader_data->loader_name_and_id(),
2895 (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
2896 }
2897 } else {
2898 ResourceMark rm(THREAD);
2899 log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
2900 external_name(),
2901 (loader_data != NULL) ? loader_data->loader_name_and_id() : "NULL",
2902 UNNAMED_MODULE);
2903 }
2904 }
2905
2906 // Function set_classpath_index ensures that for a non-null _package_entry
2907 // of the InstanceKlass, the entry is in the boot loader's package entry table.
2908 // It then sets the classpath_index in the package entry record.
2909 //
2910 // The classpath_index field is used to find the entry on the boot loader class
2911 // path for packages with classes loaded by the boot loader from -Xbootclasspath/a
2912 // in an unnamed module. It is also used to indicate (for all packages whose
2913 // classes are loaded by the boot loader) that at least one of the package's
2914 // classes has been loaded.
2915 void InstanceKlass::set_classpath_index(s2 path_index) {
2916 if (_package_entry != NULL) {
2917 DEBUG_ONLY(PackageEntryTable* pkg_entry_tbl = ClassLoaderData::the_null_class_loader_data()->packages();)
2918 assert(pkg_entry_tbl->lookup_only(_package_entry->name()) == _package_entry, "Should be same");
2919 assert(path_index != -1, "Unexpected classpath_index");
2920 _package_entry->set_classpath_index(path_index);
2921 }
2922 }
2923
2924 // different versions of is_same_class_package
2925
2926 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
2927 oop classloader1 = this->class_loader();
2928 PackageEntry* classpkg1 = this->package();
2929 if (class2->is_objArray_klass()) {
2930 class2 = ObjArrayKlass::cast(class2)->bottom_klass();
2931 }
2932
2933 oop classloader2;
2934 PackageEntry* classpkg2;
2935 if (class2->is_instance_klass()) {
2936 classloader2 = class2->class_loader();
2937 classpkg2 = class2->package();
2938 } else {
2939 assert(class2->is_typeArray_klass(), "should be type array");
2940 classloader2 = NULL;
2941 classpkg2 = NULL;
2942 }
2943
2944 // Same package is determined by comparing class loader
2945 // and package entries. Both must be the same. This rule
2946 // applies even to classes that are defined in the unnamed
2947 // package, they still must have the same class loader.
2948 if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
2949 return true;
2950 }
2951
2952 return false;
2953 }
2954
2955 // return true if this class and other_class are in the same package. Classloader
2956 // and classname information is enough to determine a class's package
2957 bool InstanceKlass::is_same_class_package(oop other_class_loader,
2958 const Symbol* other_class_name) const {
2959 if (class_loader() != other_class_loader) {
2960 return false;
2961 }
2962 if (name()->fast_compare(other_class_name) == 0) {
2963 return true;
2964 }
2965
2966 {
2967 ResourceMark rm;
2968
2969 bool bad_class_name = false;
2970 TempNewSymbol other_pkg = ClassLoader::package_from_class_name(other_class_name, &bad_class_name);
2971 if (bad_class_name) {
2972 return false;
2973 }
2974 // Check that package_from_class_name() returns NULL, not "", if there is no package.
2975 assert(other_pkg == NULL || other_pkg->utf8_length() > 0, "package name is empty string");
2976
2977 const Symbol* const this_package_name =
2978 this->package() != NULL ? this->package()->name() : NULL;
2979
2980 if (this_package_name == NULL || other_pkg == NULL) {
2981 // One of the two doesn't have a package. Only return true if the other
2982 // one also doesn't have a package.
2983 return this_package_name == other_pkg;
2984 }
2985
2986 // Check if package is identical
2987 return this_package_name->fast_compare(other_pkg) == 0;
2988 }
2989 }
2990
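// Slow path used when the class name contains non-ASCII bytes: convert to Unicode
// and test for a leading "java/" package.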
2991 static bool is_prohibited_package_slow(Symbol* class_name) {
2992 // Caller has ResourceMark
2993 int length;
2994 jchar* unicode = class_name->as_unicode(length);
2995 return (length >= 5 &&
2996 unicode[0] == 'j' &&
2997 unicode[1] == 'a' &&
2998 unicode[2] == 'v' &&
2999 unicode[3] == 'a' &&
3000 unicode[4] == '/');
3001 }
3002
3003 // Only boot and platform class loaders can define classes in "java/" packages.
3004 void InstanceKlass::check_prohibited_package(Symbol* class_name,
3005 ClassLoaderData* loader_data,
3006 TRAPS) {
3007 if (!loader_data->is_boot_class_loader_data() &&
3008 !loader_data->is_platform_class_loader_data() &&
3009 class_name != NULL && class_name->utf8_length() >= 5) {
3010 ResourceMark rm(THREAD);
3011 bool prohibited;
3012 const u1* base = class_name->base();
3013 if ((base[0] | base[1] | base[2] | base[3] | base[4]) & 0x80) {
3014 prohibited = is_prohibited_package_slow(class_name);
3015 } else {
3016 char* name = class_name->as_C_string();
3017 prohibited = (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/');
3018 }
3019 if (prohibited) {
3020 TempNewSymbol pkg_name = ClassLoader::package_from_class_name(class_name);
3021 assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'");
3022 char* name = pkg_name->as_C_string();
3023 const char* class_loader_name = loader_data->loader_name_and_id();
3024 StringUtils::replace_no_expand(name, "/", ".");
3025 const char* msg_text1 = "Class loader (instance of): ";
3026 const char* msg_text2 = " tried to load prohibited package name: ";
3027 size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
3028 char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
3029 jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
3030 THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
3031 }
3032 }
3033 return;
3034 }
3035
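// Search this class's InnerClasses attribute for the entry describing this class
// itself; if found, return the constant pool indices of the outer class and the
// inner name through ooff and noff.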
3036 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
3037 constantPoolHandle i_cp(THREAD, constants());
3038 for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
3039 int ioff = iter.inner_class_info_index();
3040 if (ioff != 0) {
3041 // Check to see if the name matches the class we're looking for
3042 // before attempting to find the class.
3043 if (i_cp->klass_name_at_matches(this, ioff)) {
3044 Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
3045 if (this == inner_klass) {
3046 *ooff = iter.outer_class_info_index();
3047 *noff = iter.inner_name_index();
3048 return true;
3049 }
3050 }
3051 }
3052 }
3053 return false;
3054 }
3055
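// Compute the immediately enclosing class: use the InnerClasses attribute for member
// classes and the EnclosingMethod attribute for local/anonymous classes. The relationship
// is verified afterwards so a spoofed inner class cannot gain access rights.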
3056 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
3057 InstanceKlass* outer_klass = NULL;
3058 *inner_is_member = false;
3059 int ooff = 0, noff = 0;
3060 bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
3061 if (has_inner_classes_attr) {
3062 constantPoolHandle i_cp(THREAD, constants());
3063 if (ooff != 0) {
3064 Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
3065 if (!ok->is_instance_klass()) {
3066 // If the outer class is not an instance klass then it cannot have
3067 // declared any inner classes.
3068 ResourceMark rm(THREAD);
3069 Exceptions::fthrow(
3070 THREAD_AND_LOCATION,
3071 vmSymbols::java_lang_IncompatibleClassChangeError(),
3072 "%s and %s disagree on InnerClasses attribute",
3073 ok->external_name(),
3074 external_name());
3075 return NULL;
3076 }
3077 outer_klass = InstanceKlass::cast(ok);
3078 *inner_is_member = true;
3079 }
3080 if (NULL == outer_klass) {
3081 // It may be a local class; try for that.
3082 int encl_method_class_idx = enclosing_method_class_index();
3083 if (encl_method_class_idx != 0) {
3084 Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
3085 outer_klass = InstanceKlass::cast(ok);
3086 *inner_is_member = false;
3087 }
3088 }
3089 }
3090
3091 // If no enclosing class was found (e.g., no InnerClasses attribute entry for this class), return NULL.
3092 if (NULL == outer_klass) return NULL;
3093
3094 // Throws an exception if the outer klass has not declared this class as an inner klass.
3095 // We need evidence that each klass knows about the other, or else
3096 // the system could allow a spoof of an inner class to gain access rights.
3097 Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
3098 return outer_klass;
3099 }
3100
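// Compute the modifier flags for this class. If the class appears as a member in its
// own InnerClasses attribute, the inner access flags take precedence over the class's
// access flags; ACC_SUPER is always stripped.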
3101 jint InstanceKlass::compute_modifier_flags() const {
3102 jint access = access_flags().as_int();
3103
3104 // But check if it happens to be a member class.
3105 InnerClassesIterator iter(this);
3106 for (; !iter.done(); iter.next()) {
3107 int ioff = iter.inner_class_info_index();
3108 // The inner class info index can be zero; skip such entries.
3109 // Strange but true: the JVM spec allows null inner class refs.
3110 if (ioff == 0) continue;
3111
3112 // Only compare class names here; no class loading is needed since
3113 // we are looking for the flags of this class itself.
3114 Symbol* inner_name = constants()->klass_name_at(ioff);
3115 if (name() == inner_name) {
3116 // This is really a member class.
3117 access = iter.inner_access_flags();
3118 break;
3119 }
3120 }
3121 // Remember to strip ACC_SUPER bit
3122 return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
3123 }
3124
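// Map this class's state onto the JVMTI class status bits.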
3125 jint InstanceKlass::jvmti_class_status() const {
3126 jint result = 0;
3127
3128 if (is_linked()) {
3129 result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
3130 }
3131
3132 if (is_initialized()) {
3133 assert(is_linked(), "Class status is not consistent");
3134 result |= JVMTI_CLASS_STATUS_INITIALIZED;
3135 }
3136 if (is_in_error_state()) {
3137 result |= JVMTI_CLASS_STATUS_ERROR;
3138 }
3139 return result;
3140 }
3141
3142 Method* InstanceKlass::method_at_itable(InstanceKlass* holder, int index, TRAPS) {
3143 bool implements_interface; // initialized by method_at_itable_or_null
3144 Method* m = method_at_itable_or_null(holder, index,
3145 implements_interface); // out parameter
3146 if (m != NULL) {
3147 assert(implements_interface, "sanity");
3148 return m;
3149 } else if (implements_interface) {
3150 // Throw AbstractMethodError since corresponding itable slot is empty.
3151 THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
3152 } else {
3153 // If the interface isn't implemented by the receiver class,
3154 // the VM should throw IncompatibleClassChangeError.
3155 ResourceMark rm(THREAD);
3156 stringStream ss;
3157 bool same_module = (module() == holder->module());
3158 ss.print("Receiver class %s does not implement "
3159 "the interface %s defining the method to be called "
3160 "(%s%s%s)",
3161 external_name(), holder->external_name(),
3162 (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
3163 (same_module) ? "" : "; ",
3164 (same_module) ? "" : holder->class_in_module_of_loader());
3165 THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
3166 }
3167 }
3168
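// Scan this class's itable for the offset entry belonging to 'holder'. If found, set
// implements_interface and return the method at 'index' (which may be NULL for an empty
// slot); otherwise clear implements_interface and return NULL.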
3169 Method* InstanceKlass::method_at_itable_or_null(InstanceKlass* holder, int index, bool& implements_interface) {
3170 klassItable itable(this);
3171 for (int i = 0; i < itable.size_offset_table(); i++) {
3172 itableOffsetEntry* offset_entry = itable.offset_entry(i);
3173 if (offset_entry->interface_klass() == holder) {
3174 implements_interface = true;
3175 itableMethodEntry* ime = offset_entry->first_method_entry(this);
3176 Method* m = ime[index].method();
3177 return m;
3178 }
3179 }
3180 implements_interface = false;
3181 return NULL; // offset entry not found
3182 }
3183
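// Map an interface method to the vtable index that implements it in this class: for a
// default (non-abstract) interface method check the default-method table first, then
// fall back to miranda methods. Returns Method::invalid_vtable_index if none is found.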
3184 int InstanceKlass::vtable_index_of_interface_method(Method* intf_method) {
3185 assert(is_linked(), "required");
3186 assert(intf_method->method_holder()->is_interface(), "not an interface method");
3187 assert(is_subtype_of(intf_method->method_holder()), "interface not implemented");
3188
3189 int vtable_index = Method::invalid_vtable_index;
3190 Symbol* name = intf_method->name();
3191 Symbol* signature = intf_method->signature();
3192
3193 // First check in default method array
3194 if (!intf_method->is_abstract() && default_methods() != NULL) {
3195 int index = find_method_index(default_methods(),
3196 name, signature,
3197 Klass::OverpassLookupMode::find,
3198 Klass::StaticLookupMode::find,
3199 Klass::PrivateLookupMode::find);
3200 if (index >= 0) {
3201 vtable_index = default_vtable_indices()->at(index);
3202 }
3203 }
3204 if (vtable_index == Method::invalid_vtable_index) {
3205 // get vtable_index for miranda methods
3206 klassVtable vt = vtable();
3207 vtable_index = vt.index_of_miranda(name, signature);
3208 }
3209 return vtable_index;
3210 }
3211
3212 #if INCLUDE_JVMTI
3213 // Update default_methods for RedefineClasses for methods that are
3214 // not yet in the vtable, due to concurrent subclass definition and superinterface
3215 // redefinition.
3216 // Note: methods already in the vtable should have been updated via adjust_method_entries.
3217 void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
3218 // search the default_methods for uses of either obsolete or EMCP methods
3219 if (default_methods() != NULL) {
3220 for (int index = 0; index < default_methods()->length(); index ++) {
3221 Method* old_method = default_methods()->at(index);
3222 if (old_method == NULL || !old_method->is_old()) {
3223 continue; // skip uninteresting entries
3224 }
3225 assert(!old_method->is_deleted(), "default methods may not be deleted");
3226 Method* new_method = old_method->get_new_method();
3227 default_methods()->at_put(index, new_method);
3228
3229 if (log_is_enabled(Info, redefine, class, update)) {
3230 ResourceMark rm;
3231 if (!(*trace_name_printed)) {
3232 log_info(redefine, class, update)
3233 ("adjust: klassname=%s default methods from name=%s",
3234 external_name(), old_method->method_holder()->external_name());
3235 *trace_name_printed = true;
3236 }
3237 log_debug(redefine, class, update, vtables)
3238 ("default method update: %s(%s) ",
3239 new_method->name()->as_C_string(), new_method->signature()->as_C_string());
3240 }
3241 }
3242 }
3243 }
3244 #endif // INCLUDE_JVMTI
3245
3246 // On-stack replacement (OSR) support
3247 void InstanceKlass::add_osr_nmethod(nmethod* n) {
3248 assert_lock_strong(CompiledMethod_lock);
3249 #ifndef PRODUCT
3250 nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3251 assert(prev == NULL || !prev->is_in_use() COMPILER2_PRESENT(|| StressRecompilation),
3252 "redundant OSR recompilation detected. memory leak in CodeCache!");
3253 #endif
3254 // only one compilation can be active
3255 assert(n->is_osr_method(), "wrong kind of nmethod");
3256 n->set_osr_link(osr_nmethods_head());
3257 set_osr_nmethods_head(n);
3258 // Raise the highest osr level if necessary
3259 n->method()->set_highest_osr_comp_level(MAX2(n->method()->highest_osr_comp_level(), n->comp_level()));
3260
3261 // Get rid of the osr methods for the same bci that have lower levels.
3262 for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3263 nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3264 if (inv != NULL && inv->is_in_use()) {
3265 inv->make_not_entrant();
3266 }
3267 }
3268 }
3269
3270 // Remove osr nmethod from the list. Return true if found and removed.
3271 bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3272 // This is a short non-blocking critical region, so skipping the safepoint check is ok.
3273 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock
3274 , Mutex::_no_safepoint_check_flag);
3275 assert(n->is_osr_method(), "wrong kind of nmethod");
3276 nmethod* last = NULL;
3277 nmethod* cur = osr_nmethods_head();
3278 int max_level = CompLevel_none; // Find the max comp level excluding n
3279 Method* m = n->method();
3280 // Search for match
3281 bool found = false;
3282 while(cur != NULL && cur != n) {
3283 if (m == cur->method()) {
3284 // Find max level before n
3285 max_level = MAX2(max_level, cur->comp_level());
3286 }
3287 last = cur;
3288 cur = cur->osr_link();
3289 }
3290 nmethod* next = NULL;
3291 if (cur == n) {
3292 found = true;
3293 next = cur->osr_link();
3294 if (last == NULL) {
3295 // Remove first element
3296 set_osr_nmethods_head(next);
3297 } else {
3298 last->set_osr_link(next);
3299 }
3300 }
3301 n->set_osr_link(NULL);
3302 cur = next;
3303 while (cur != NULL) {
3304 // Find max level after n
3305 if (m == cur->method()) {
3306 max_level = MAX2(max_level, cur->comp_level());
3307 }
3308 cur = cur->osr_link();
3309 }
3310 m->set_highest_osr_comp_level(max_level);
3311 return found;
3312 }
3313
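// Mark all OSR nmethods belonging to method 'm' for deoptimization and return how many
// were marked.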
3314 int InstanceKlass::mark_osr_nmethods(const Method* m) {
3315 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
3316 Mutex::_no_safepoint_check_flag);
3317 nmethod* osr = osr_nmethods_head();
3318 int found = 0;
3319 while (osr != NULL) {
3320 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3321 if (osr->method() == m) {
3322 osr->mark_for_deoptimization();
3323 found++;
3324 }
3325 osr = osr->osr_link();
3326 }
3327 return found;
3328 }
3329
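// Look up an OSR nmethod for 'm' at 'bci' (InvocationEntryBci matches any entry bci).
// With match_level, only an nmethod at exactly 'comp_level' is returned; otherwise the
// one with the highest compilation level at or above 'comp_level' is returned, or NULL.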
3330 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3331 MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
3332 Mutex::_no_safepoint_check_flag);
3333 nmethod* osr = osr_nmethods_head();
3334 nmethod* best = NULL;
3335 while (osr != NULL) {
3336 assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3337 // There can be a time when a c1 osr method exists but we are waiting
3338 // for a c2 version. When c2 completes its osr nmethod we will trash
3339 // the c1 version and only be able to find the c2 version. However,
3340 // while we overflow in the c1 code at back branches, we don't want to
3341 // try to switch to the same code that we are already running.
3342
3343 if (osr->method() == m &&
3344 (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3345 if (match_level) {
3346 if (osr->comp_level() == comp_level) {
3347 // Found a match - return it.
3348 return osr;
3349 }
3350 } else {
3351 if (best == NULL || (osr->comp_level() > best->comp_level())) {
3352 if (osr->comp_level() == CompilationPolicy::highest_compile_level()) {
3353 // Found the best possible - return it.
3354 return osr;
3355 }
3356 best = osr;
3357 }
3358 }
3359 }
3360 osr = osr->osr_link();
3361 }
3362
3363 assert(match_level == false || best == NULL, "shouldn't pick up anything if match_level is set");
3364 if (best != NULL && best->comp_level() >= comp_level) {
3365 return best;
3366 }
3367 return NULL;
3368 }
3369
3370 // -----------------------------------------------------------------------------------------------------
3371 // Printing
3372
3373 #define BULLET " - "
3374
3375 static const char* state_names[] = {
3376 "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3377 };
3378
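// Print 'len' vtable/itable slots starting at 'start', annotating entries that look
// like valid Metadata.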
3379 static void print_vtable(intptr_t* start, int len, outputStream* st) {
3380 for (int i = 0; i < len; i++) {
3381 intptr_t e = start[i];
3382 st->print("%d : " INTPTR_FORMAT, i, e);
3383 if (MetaspaceObj::is_valid((Metadata*)e)) {
3384 st->print(" ");
3385 ((Metadata*)e)->print_value_on(st);
3386 }
3387 st->cr();
3388 }
3389 }
3390
3391 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3392 return print_vtable(reinterpret_cast<intptr_t*>(start), len, st);
3393 }
3394
3395 void InstanceKlass::print_on(outputStream* st) const {
3396 assert(is_klass(), "must be klass");
3397 Klass::print_on(st);
3398
3399 st->print(BULLET"instance size: %d", size_helper()); st->cr();
3400 st->print(BULLET"klass size: %d", size()); st->cr();
3401 st->print(BULLET"access: "); access_flags().print_on(st); st->cr();
3402 st->print(BULLET"state: "); st->print_cr("%s", state_names[_init_state]);
3403 st->print(BULLET"name: "); name()->print_value_on(st); st->cr();
3404 st->print(BULLET"super: "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3405 st->print(BULLET"sub: ");
3406 Klass* sub = subklass();
3407 int n;
3408 for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
3409 if (n < MaxSubklassPrintSize) {
3410 sub->print_value_on(st);
3411 st->print(" ");
3412 }
3413 }
3414 if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize);
3415 st->cr();
3416
3417 if (is_interface()) {
3418 st->print_cr(BULLET"nof implementors: %d", nof_implementors());
3419 if (nof_implementors() == 1) {
3420 st->print_cr(BULLET"implementor: ");
3421 st->print(" ");
3422 implementor()->print_value_on(st);
3423 st->cr();
3424 }
3425 }
3426
3427 st->print(BULLET"arrays: "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3428 st->print(BULLET"methods: "); methods()->print_value_on(st); st->cr();
3429 if (Verbose || WizardMode) {
3430 Array<Method*>* method_array = methods();
3431 for (int i = 0; i < method_array->length(); i++) {
3432 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3433 }
3434 }
3435 st->print(BULLET"method ordering: "); method_ordering()->print_value_on(st); st->cr();
3436 st->print(BULLET"default_methods: "); default_methods()->print_value_on(st); st->cr();
3437 if (Verbose && default_methods() != NULL) {
3438 Array<Method*>* method_array = default_methods();
3439 for (int i = 0; i < method_array->length(); i++) {
3440 st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3441 }
3442 }
3443 if (default_vtable_indices() != NULL) {
3444 st->print(BULLET"default vtable indices: "); default_vtable_indices()->print_value_on(st); st->cr();
3445 }
3446 st->print(BULLET"local interfaces: "); local_interfaces()->print_value_on(st); st->cr();
3447 st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3448 st->print(BULLET"constants: "); constants()->print_value_on(st); st->cr();
3449 if (class_loader_data() != NULL) {
3450 st->print(BULLET"class loader data: ");
3451 class_loader_data()->print_value_on(st);
3452 st->cr();
3453 }
3454 if (source_file_name() != NULL) {
3455 st->print(BULLET"source file: ");
3456 source_file_name()->print_value_on(st);
3457 st->cr();
3458 }
3459 if (source_debug_extension() != NULL) {
3460 st->print(BULLET"source debug extension: ");
3461 st->print("%s", source_debug_extension());
3462 st->cr();
3463 }
3464 st->print(BULLET"class annotations: "); class_annotations()->print_value_on(st); st->cr();
3465 st->print(BULLET"class type annotations: "); class_type_annotations()->print_value_on(st); st->cr();
3466 st->print(BULLET"field annotations: "); fields_annotations()->print_value_on(st); st->cr();
3467 st->print(BULLET"field type annotations: "); fields_type_annotations()->print_value_on(st); st->cr();
3468 {
3469 bool have_pv = false;
3470 // previous versions are linked together through the InstanceKlass
3471 for (InstanceKlass* pv_node = previous_versions();
3472 pv_node != NULL;
3473 pv_node = pv_node->previous_versions()) {
3474 if (!have_pv)
3475 st->print(BULLET"previous version: ");
3476 have_pv = true;
3477 pv_node->constants()->print_value_on(st);
3478 }
3479 if (have_pv) st->cr();
3480 }
3481
3482 if (generic_signature() != NULL) {
3483 st->print(BULLET"generic signature: ");
3484 generic_signature()->print_value_on(st);
3485 st->cr();
3486 }
3487 st->print(BULLET"inner classes: "); inner_classes()->print_value_on(st); st->cr();
3488 st->print(BULLET"nest members: "); nest_members()->print_value_on(st); st->cr();
3489 if (record_components() != NULL) {
3490 st->print(BULLET"record components: "); record_components()->print_value_on(st); st->cr();
3491 }
3492 st->print(BULLET"permitted subclasses: "); permitted_subclasses()->print_value_on(st); st->cr();
3493 if (java_mirror() != NULL) {
3494 st->print(BULLET"java mirror: ");
3495 java_mirror()->print_value_on(st);
3496 st->cr();
3497 } else {
3498 st->print_cr(BULLET"java mirror: NULL");
3499 }
3500 st->print(BULLET"vtable length %d (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
3501 if (vtable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_vtable(), vtable_length(), st);
3502 st->print(BULLET"itable length %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
3503 if (itable_length() > 0 && (Verbose || WizardMode)) print_vtable(start_of_itable(), itable_length(), st);
3504 st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
3505 FieldPrinter print_static_field(st);
3506 ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
3507 st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
3508 FieldPrinter print_nonstatic_field(st);
3509 InstanceKlass* ik = const_cast<InstanceKlass*>(this);
3510 ik->do_nonstatic_fields(&print_nonstatic_field);
3511
3512 st->print(BULLET"non-static oop maps: ");
3513 OopMapBlock* map = start_of_nonstatic_oop_maps();
3514 OopMapBlock* end_map = map + nonstatic_oop_map_count();
3515 while (map < end_map) {
3516 st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
3517 map++;
3518 }
3519 st->cr();
3520 }
3521
3522 void InstanceKlass::print_value_on(outputStream* st) const {
3523 assert(is_klass(), "must be klass");
3524 if (Verbose || WizardMode) access_flags().print_on(st);
3525 name()->print_value_on(st);
3526 }
3527
3528 void FieldPrinter::do_field(fieldDescriptor* fd) {
3529 _st->print(BULLET);
3530 if (_obj == NULL) {
3531 fd->print_on(_st);
3532 _st->cr();
3533 } else {
3534 fd->print_on_for(_st, _obj);
3535 _st->cr();
3536 }
3537 }
3538
3539
3540 void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
3541 Klass::oop_print_on(obj, st);
3542
3543 if (this == vmClasses::String_klass()) {
3544 typeArrayOop value = java_lang_String::value(obj);
3545 juint length = java_lang_String::length(obj);
3546 if (value != NULL &&
3547 value->is_typeArray() &&
3548 length <= (juint) value->length()) {
3549 st->print(BULLET"string: ");
3550 java_lang_String::print(obj, st);
3551 st->cr();
3552 if (!WizardMode) return; // that is enough
3553 }
3554 }
3555
3556 st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
3557 FieldPrinter print_field(st, obj);
3558 do_nonstatic_fields(&print_field);
3559
3560 if (this == vmClasses::Class_klass()) {
3561 st->print(BULLET"signature: ");
3562 java_lang_Class::print_signature(obj, st);
3563 st->cr();
3564 Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
3565 st->print(BULLET"fake entry for mirror: ");
3566 Metadata::print_value_on_maybe_null(st, mirrored_klass);
3567 st->cr();
3568 Klass* array_klass = java_lang_Class::array_klass_acquire(obj);
3569 st->print(BULLET"fake entry for array: ");
3570 Metadata::print_value_on_maybe_null(st, array_klass);
3571 st->cr();
3572 st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
3573 st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
3574 Klass* real_klass = java_lang_Class::as_Klass(obj);
3575 if (real_klass != NULL && real_klass->is_instance_klass()) {
3576 InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
3577 }
3578 } else if (this == vmClasses::MethodType_klass()) {
3579 st->print(BULLET"signature: ");
3580 java_lang_invoke_MethodType::print_signature(obj, st);
3581 st->cr();
3582 }
3583 }
3584
3585 #ifndef PRODUCT
3586
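// Debug-only check that 'i' is a valid itable method index for this interface.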
3587 bool InstanceKlass::verify_itable_index(int i) {
3588 int method_count = klassItable::method_count_for_interface(this);
3589 assert(i >= 0 && i < method_count, "index out of bounds");
3590 return true;
3591 }
3592
3593 #endif //PRODUCT
3594
3595 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
3596 st->print("a ");
3597 name()->print_value_on(st);
3598 obj->print_address_on(st);
3599 if (this == vmClasses::String_klass()
3600 && java_lang_String::value(obj) != NULL) {
3601 ResourceMark rm;
3602 int len = java_lang_String::length(obj);
3603 int plen = (len < 24 ? len : 12);
3604 char* str = java_lang_String::as_utf8_string(obj, 0, plen);
3605 st->print(" = \"%s\"", str);
3606 if (len > plen)
3607 st->print("...[%d]", len);
3608 } else if (this == vmClasses::Class_klass()) {
3609 Klass* k = java_lang_Class::as_Klass(obj);
3610 st->print(" = ");
3611 if (k != NULL) {
3612 k->print_value_on(st);
3613 } else {
3614 const char* tname = type2name(java_lang_Class::primitive_type(obj));
3615 st->print("%s", tname ? tname : "type?");
3616 }
3617 } else if (this == vmClasses::MethodType_klass()) {
3618 st->print(" = ");
3619 java_lang_invoke_MethodType::print_signature(obj, st);
3620 } else if (java_lang_boxing_object::is_instance(obj)) {
3621 st->print(" = ");
3622 java_lang_boxing_object::print(obj, st);
3623 } else if (this == vmClasses::LambdaForm_klass()) {
3624 oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
3625 if (vmentry != NULL) {
3626 st->print(" => ");
3627 vmentry->print_value_on(st);
3628 }
3629 } else if (this == vmClasses::MemberName_klass()) {
3630 Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
3631 if (vmtarget != NULL) {
3632 st->print(" = ");
3633 vmtarget->print_value_on(st);
3634 } else {
3635 oop clazz = java_lang_invoke_MemberName::clazz(obj);
3636 oop name = java_lang_invoke_MemberName::name(obj);
3637 if (clazz != NULL) {
3638 clazz->print_value_on(st);
3639 } else {
3640 st->print("NULL");
3641 }
3642 st->print(".");
3643 if (name != NULL) {
3644 name->print_value_on(st);
3645 } else {
3646 st->print("NULL");
3647 }
3648 }
3649 }
3650 }
3651
3652 const char* InstanceKlass::internal_name() const {
3653 return external_name();
3654 }
3655
3656 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
3657 const ModuleEntry* module_entry,
3658 const ClassFileStream* cfs) const {
3659 log_to_classlist();
3660
3661 if (!log_is_enabled(Info, class, load)) {
3662 return;
3663 }
3664
3665 ResourceMark rm;
3666 LogMessage(class, load) msg;
3667 stringStream info_stream;
3668
3669 // Name and class hierarchy info
3670 info_stream.print("%s", external_name());
3671
3672 // Source
3673 if (cfs != NULL) {
3674 if (cfs->source() != NULL) {
3675 const char* module_name = (module_entry->name() == NULL) ? UNNAMED_MODULE : module_entry->name()->as_C_string();
3676 if (module_name != NULL) {
3677 // When the boot loader created the stream, it didn't know the module name
3678 // yet. Let's format it now.
3679 if (cfs->from_boot_loader_modules_image()) {
3680 info_stream.print(" source: jrt:/%s", module_name);
3681 } else {
3682 info_stream.print(" source: %s", cfs->source());
3683 }
3684 } else {
3685 info_stream.print(" source: %s", cfs->source());
3686 }
3687 } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
3688 Thread* current = Thread::current();
3689 Klass* caller = current->is_Java_thread() ?
3690 current->as_Java_thread()->security_get_caller_class(1):
3691 NULL;
3692 // caller can be NULL, for example, during a JVMTI VM_Init hook
3693 if (caller != NULL) {
3694 info_stream.print(" source: instance of %s", caller->external_name());
3695 } else {
3696 // source is unknown
3697 }
3698 } else {
3699 oop class_loader = loader_data->class_loader();
3700 info_stream.print(" source: %s", class_loader->klass()->external_name());
3701 }
3702 } else {
3703 assert(this->is_shared(), "must be");
3704 if (MetaspaceShared::is_shared_dynamic((void*)this)) {
3705 info_stream.print(" source: shared objects file (top)");
3706 } else {
3707 info_stream.print(" source: shared objects file");
3708 }
3709 }
3710
3711 msg.info("%s", info_stream.as_string());
3712
3713 if (log_is_enabled(Debug, class, load)) {
3714 stringStream debug_stream;
3715
3716 // Class hierarchy info
3717 debug_stream.print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT,
3718 p2i(this), p2i(superklass()));
3719
3720 // Interfaces
3721 if (local_interfaces() != NULL && local_interfaces()->length() > 0) {
3722 debug_stream.print(" interfaces:");
3723 int length = local_interfaces()->length();
3724 for (int i = 0; i < length; i++) {
3725 debug_stream.print(" " INTPTR_FORMAT,
3726 p2i(InstanceKlass::cast(local_interfaces()->at(i))));
3727 }
3728 }
3729
3730 // Class loader
3731 debug_stream.print(" loader: [");
3732 loader_data->print_value_on(&debug_stream);
3733 debug_stream.print("]");
3734
3735 // Classfile checksum
3736 if (cfs) {
3737 debug_stream.print(" bytes: %d checksum: %08x",
3738 cfs->length(),
3739 ClassLoader::crc32(0, (const char*)cfs->buffer(),
3740 cfs->length()));
3741 }
3742
3743 msg.debug("%s", debug_stream.as_string());
3744 }
3745 }
3746
3747 // Verification
3748
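// Checks that every oop field of an object is either NULL or a valid oop.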
3749 class VerifyFieldClosure: public BasicOopIterateClosure {
3750 protected:
3751 template <class T> void do_oop_work(T* p) {
3752 oop obj = RawAccess<>::oop_load(p);
3753 if (!oopDesc::is_oop_or_null(obj)) {
3754 tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
3755 Universe::print_on(tty);
3756 guarantee(false, "boom");
3757 }
3758 }
3759 public:
3760 virtual void do_oop(oop* p) { VerifyFieldClosure::do_oop_work(p); }
3761 virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
3762 };
3763
3764 void InstanceKlass::verify_on(outputStream* st) {
3765 #ifndef PRODUCT
3766 // Avoid redundant verifies; this really should be in product.
3767 if (_verify_count == Universe::verify_count()) return;
3768 _verify_count = Universe::verify_count();
3769 #endif
3770
3771 // Verify Klass
3772 Klass::verify_on(st);
3773
3774 // Verify that klass is present in ClassLoaderData
3775 guarantee(class_loader_data()->contains_klass(this),
3776 "this class isn't found in class loader data");
3777
3778 // Verify vtables
3779 if (is_linked()) {
3780 // $$$ This used to be done only for m/s collections. Doing it
3781 // always seemed a valid generalization. (DLD -- 6/00)
3782 vtable().verify(st);
3783 }
3784
3785 // Verify first subklass
3786 if (subklass() != NULL) {
3787 guarantee(subklass()->is_klass(), "should be klass");
3788 }
3789
3790 // Verify siblings
3791 Klass* super = this->super();
3792 Klass* sib = next_sibling();
3793 if (sib != NULL) {
3794 if (sib == this) {
3795 fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
3796 }
3797
3798 guarantee(sib->is_klass(), "should be klass");
3799 guarantee(sib->super() == super, "siblings should have same superklass");
3800 }
3801
3802 // Verify local interfaces
3803 if (local_interfaces()) {
3804 Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
3805 for (int j = 0; j < local_interfaces->length(); j++) {
3806 InstanceKlass* e = local_interfaces->at(j);
3807 guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
3808 }
3809 }
3810
3811 // Verify transitive interfaces
3812 if (transitive_interfaces() != NULL) {
3813 Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
3814 for (int j = 0; j < transitive_interfaces->length(); j++) {
3815 InstanceKlass* e = transitive_interfaces->at(j);
3816 guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
3817 }
3818 }
3819
3820 // Verify methods
3821 if (methods() != NULL) {
3822 Array<Method*>* methods = this->methods();
3823 for (int j = 0; j < methods->length(); j++) {
3824 guarantee(methods->at(j)->is_method(), "non-method in methods array");
3825 }
3826 for (int j = 0; j < methods->length() - 1; j++) {
3827 Method* m1 = methods->at(j);
3828 Method* m2 = methods->at(j + 1);
3829 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
3830 }
3831 }
3832
3833 // Verify method ordering
3834 if (method_ordering() != NULL) {
3835 Array<int>* method_ordering = this->method_ordering();
3836 int length = method_ordering->length();
3837 if (JvmtiExport::can_maintain_original_method_order() ||
3838 ((UseSharedSpaces || Arguments::is_dumping_archive()) && length != 0)) {
3839 guarantee(length == methods()->length(), "invalid method ordering length");
3840 jlong sum = 0;
3841 for (int j = 0; j < length; j++) {
3842 int original_index = method_ordering->at(j);
3843 guarantee(original_index >= 0, "invalid method ordering index");
3844 guarantee(original_index < length, "invalid method ordering index");
3845 sum += original_index;
3846 }
3847 // Verify sum of indices 0,1,...,length-1
3848 guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
3849 } else {
3850 guarantee(length == 0, "invalid method ordering length");
3851 }
3852 }
3853
3854 // Verify default methods
3855 if (default_methods() != NULL) {
3856 Array<Method*>* methods = this->default_methods();
3857 for (int j = 0; j < methods->length(); j++) {
3858 guarantee(methods->at(j)->is_method(), "non-method in methods array");
3859 }
3860 for (int j = 0; j < methods->length() - 1; j++) {
3861 Method* m1 = methods->at(j);
3862 Method* m2 = methods->at(j + 1);
3863 guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
3864 }
3865 }
3866
3867 // Verify JNI static field identifiers
3868 if (jni_ids() != NULL) {
3869 jni_ids()->verify(this);
3870 }
3871
3872 // Verify other fields
3873 if (constants() != NULL) {
3874 guarantee(constants()->is_constantPool(), "should be constant pool");
3875 }
3876 }
3877
3878 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
3879 Klass::oop_verify_on(obj, st);
3880 VerifyFieldClosure blk;
3881 obj->oop_iterate(&blk);
3882 }
3883
3884
3885 // JNIid class for jfieldIDs only
3886 // Note to reviewers:
3887 // These JNI functions are just moved over to column 1 and not changed
3888 // in the compressed oops workspace.
3889 JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
3890 _holder = holder;
3891 _offset = offset;
3892 _next = next;
3893 debug_only(_is_static_field_id = false;)
3894 }
3895
3896
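// Walk the JNIid chain starting at this node and return the entry with the given field
// offset, or NULL if there is none.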
3897 JNIid* JNIid::find(int offset) {
3898 JNIid* current = this;
3899 while (current != NULL) {
3900 if (current->offset() == offset) return current;
3901 current = current->next();
3902 }
3903 return NULL;
3904 }
3905
3906 void JNIid::deallocate(JNIid* current) {
3907 while (current != NULL) {
3908 JNIid* next = current->next();
3909 delete current;
3910 current = next;
3911 }
3912 }
3913
3914
3915 void JNIid::verify(Klass* holder) {
3916 int first_field_offset = InstanceMirrorKlass::offset_of_static_fields();
3917 int end_field_offset;
3918 end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);
3919
3920 JNIid* current = this;
3921 while (current != NULL) {
3922 guarantee(current->holder() == holder, "Invalid klass in JNIid");
3923 #ifdef ASSERT
3924 int o = current->offset();
3925 if (current->is_static_field_id()) {
3926 guarantee(o >= first_field_offset && o < end_field_offset, "Invalid static field offset in JNIid");
3927 }
3928 #endif
3929 current = current->next();
3930 }
3931 }
3932
3933 void InstanceKlass::set_init_state(ClassState state) {
3934 #ifdef ASSERT
3935 bool good_state = is_shared() ? (_init_state <= state)
3936 : (_init_state < state);
3937 assert(good_state || state == allocated, "illegal state transition");
3938 #endif
3939 assert(_init_thread == NULL, "should be cleared before state change");
3940 _init_state = (u1)state;
3941 }
3942
3943 #if INCLUDE_JVMTI
3944
3945 // RedefineClasses() support for previous versions
3946
3947 // Set when there is at least one previous version of some class that must be walked
3948 // during class unloading, kept alive because old methods in that class are still running.
3949 // Otherwise the previous version list is cleaned up.
3950 bool InstanceKlass::_has_previous_versions = false;
3951
3952 // Returns true if there are previous versions of a class for class
3953 // unloading only. Also resets the flag to false. purge_previous_version_list()
3954 // will set the flag to true if there are any left, i.e., if there's any
3955 // work to do for next time. This is to avoid the expensive code cache
3956 // walk in CLDG::clean_deallocate_lists().
3957 bool InstanceKlass::has_previous_versions_and_reset() {
3958 bool ret = _has_previous_versions;
3959 log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s",
3960 ret ? "true" : "false");
3961 _has_previous_versions = false;
3962 return ret;
3963 }
3964
3965 // This nulls out jmethodIDs for all obsolete methods in 'klass'.
3966 // It needs to be called explicitly for all previous versions of a class because these may not be cleaned up
3967 // during class unloading.
3968 // We cannot use the jmethodID cache associated with klass directly because the 'previous' versions
3969 // do not have the jmethodID cache filled in. Instead, we need to look up the jmethodID for each method, and this
3970 // is expensive - O(n) for one jmethodID lookup; for all contained methods it is O(n^2).
3971 // The jmethodID lookup for each method is expensive because there is no direct link between a method and its jmethodID.
3972 void InstanceKlass::clear_jmethod_ids(InstanceKlass* klass) {
3973 Array<Method*>* method_refs = klass->methods();
3974 for (int k = 0; k < method_refs->length(); k++) {
3975 Method* method = method_refs->at(k);
3976 if (method != nullptr && method->is_obsolete()) {
3977 method->clear_jmethod_id();
3978 }
3979 }
3980 }
3981
3982 // Purge previous versions before adding new previous versions of the class and
3983 // during class unloading.
3984 void InstanceKlass::purge_previous_version_list() {
3985 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
3986 assert(has_been_redefined(), "Should only be called for main class");
3987
3988 // Quick exit.
3989 if (previous_versions() == NULL) {
3990 return;
3991 }
3992
3993 // This klass has previous versions so see what we can cleanup
3994 // while it is safe to do so.
3995
3996 int deleted_count = 0; // leave debugging breadcrumbs
3997 int live_count = 0;
3998 ClassLoaderData* loader_data = class_loader_data();
3999 assert(loader_data != NULL, "should never be null");
4000
4001 ResourceMark rm;
4002 log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());
4003
4004 // previous versions are linked together through the InstanceKlass
4005 InstanceKlass* pv_node = previous_versions();
4006 InstanceKlass* last = this;
4007 int version = 0;
4008
4009 // check the previous versions list
4010 for (; pv_node != NULL; ) {
4011
4012 ConstantPool* pvcp = pv_node->constants();
4013 assert(pvcp != NULL, "cp ref was unexpectedly cleared");
4014
4015 if (!pvcp->on_stack()) {
4016 // If the constant pool isn't on stack, none of the methods
4017 // are executing. Unlink this previous_version.
4018 // The previous version InstanceKlass is on the ClassLoaderData deallocate list
4019 // so will be deallocated during the next phase of class unloading.
4020 log_trace(redefine, class, iklass, purge)
4021 ("previous version " INTPTR_FORMAT " is dead.", p2i(pv_node));
4022 // Unlink from previous version list.
4023 assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
4024 InstanceKlass* next = pv_node->previous_versions();
4025 clear_jmethod_ids(pv_node); // jmethodID maintenance for the unloaded class
4026 pv_node->link_previous_versions(NULL); // point next to NULL
4027 last->link_previous_versions(next);
4028 // Delete this node directly. Nothing is referring to it and we don't
4029 // want it to increase the counter for metadata to delete in CLDG.
4030 MetadataFactory::free_metadata(loader_data, pv_node);
4031 pv_node = next;
4032 deleted_count++;
4033 version++;
4034 continue;
4035 } else {
4036 log_trace(redefine, class, iklass, purge)("previous version " INTPTR_FORMAT " is alive", p2i(pv_node));
4037 assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
4038 guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
4039 live_count++;
4040 // found a previous version for next time we do class unloading
4041 _has_previous_versions = true;
4042 }
4043
4044 // next previous version
4045 last = pv_node;
4046 pv_node = pv_node->previous_versions();
4047 version++;
4048 }
4049 log_trace(redefine, class, iklass, purge)
4050 ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
4051 }
4052
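// A RedefineClasses() call that leaves a mix of EMCP and obsolete methods may make EMCP
// methods in earlier previous versions obsolete as well; walk the previous-version list
// and mark any matching, not-yet-obsolete methods.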
4053 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
4054 int emcp_method_count) {
4055 int obsolete_method_count = old_methods->length() - emcp_method_count;
4056
4057 if (emcp_method_count != 0 && obsolete_method_count != 0 &&
4058 _previous_versions != NULL) {
4059 // We have a mix of obsolete and EMCP methods so we have to
4060 // clear out any matching EMCP method entries the hard way.
4061 int local_count = 0;
4062 for (int i = 0; i < old_methods->length(); i++) {
4063 Method* old_method = old_methods->at(i);
4064 if (old_method->is_obsolete()) {
4065 // only obsolete methods are interesting
4066 Symbol* m_name = old_method->name();
4067 Symbol* m_signature = old_method->signature();
4068
4069 // previous versions are linked together through the InstanceKlass
4070 int j = 0;
4071 for (InstanceKlass* prev_version = _previous_versions;
4072 prev_version != NULL;
4073 prev_version = prev_version->previous_versions(), j++) {
4074
4075 Array<Method*>* method_refs = prev_version->methods();
4076 for (int k = 0; k < method_refs->length(); k++) {
4077 Method* method = method_refs->at(k);
4078
4079 if (!method->is_obsolete() &&
4080 method->name() == m_name &&
4081 method->signature() == m_signature) {
4082 // The current RedefineClasses() call has made all EMCP
4083 // versions of this method obsolete so mark it as obsolete
4084 log_trace(redefine, class, iklass, add)
4085 ("%s(%s): flush obsolete method @%d in version @%d",
4086 m_name->as_C_string(), m_signature->as_C_string(), k, j);
4087
4088 method->set_is_obsolete();
4089 break;
4090 }
4091 }
4092
4093 // The previous loop may not find a matching EMCP method, but
4094 // that doesn't mean that we can optimize and not go any
4095 // further back in the PreviousVersion generations. The EMCP
4096 // method for this generation could have already been made obsolete,
4097 // but there still may be an older EMCP method that has not
4098 // been made obsolete.
4099 }
4100
4101 if (++local_count >= obsolete_method_count) {
4102 // no more obsolete methods so bail out now
4103 break;
4104 }
4105 }
4106 }
4107 }
4108 }
4109
4110 // Save the scratch_class as the previous version if any of the methods are running.
4111 // The previous_versions are used to set breakpoints in EMCP methods and they are
4112 // also used to clean MethodData links to redefined methods that are no longer running.
4113 void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
4114 int emcp_method_count) {
4115 assert(Thread::current()->is_VM_thread(),
4116 "only VMThread can add previous versions");
4117
4118 ResourceMark rm;
4119 log_trace(redefine, class, iklass, add)
4120 ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);
4121
4122 // Clean out old previous versions for this class
4123 purge_previous_version_list();
4124
4125 // Mark newly obsolete methods in remaining previous versions. An EMCP method from
4126 // a previous redefinition may be made obsolete by this redefinition.
4127 Array<Method*>* old_methods = scratch_class->methods();
4128 mark_newly_obsolete_methods(old_methods, emcp_method_count);
4129
4130 // If the constant pool for this previous version of the class
4131 // is not marked as being on the stack, then none of the methods
4132 // in this previous version of the class are on the stack so
4133 // we don't need to add this as a previous version.
4134 ConstantPool* cp_ref = scratch_class->constants();
4135 if (!cp_ref->on_stack()) {
4136 log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
4137 scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
4138 return;
4139 }
4140
4141 // Add previous version if any methods are still running.
4142 // Set has_previous_version flag for processing during class unloading.
4143 _has_previous_versions = true;
4144 log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
4145 assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
4146 scratch_class->link_previous_versions(previous_versions());
4147 link_previous_versions(scratch_class);
4148 } // end add_previous_version()
4149
4150 #endif // INCLUDE_JVMTI
4151
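// Find the method with the given idnum. The fast path assumes the method is stored at
// its idnum; otherwise do a linear search, returning NULL if no method matches.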
4152 Method* InstanceKlass::method_with_idnum(int idnum) {
4153 Method* m = NULL;
4154 if (idnum < methods()->length()) {
4155 m = methods()->at(idnum);
4156 }
4157 if (m == NULL || m->method_idnum() != idnum) {
4158 for (int index = 0; index < methods()->length(); ++index) {
4159 m = methods()->at(index);
4160 if (m->method_idnum() == idnum) {
4161 return m;
4162 }
4163 }
4164 // None found, return null for the caller to handle.
4165 return NULL;
4166 }
4167 return m;
4168 }
4169
4170
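// Like method_with_idnum(), but matches on the original (pre-RedefineClasses) idnum.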
4171 Method* InstanceKlass::method_with_orig_idnum(int idnum) {
4172 if (idnum >= methods()->length()) {
4173 return NULL;
4174 }
4175 Method* m = methods()->at(idnum);
4176 if (m != NULL && m->orig_method_idnum() == idnum) {
4177 return m;
4178 }
4179 // Obsolete method idnum does not match the original idnum
4180 for (int index = 0; index < methods()->length(); ++index) {
4181 m = methods()->at(index);
4182 if (m->orig_method_idnum() == idnum) {
4183 return m;
4184 }
4185 }
4186 // None found, return null for the caller to handle.
4187 return NULL;
4188 }
4189
4190
4191 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
4192 InstanceKlass* holder = get_klass_version(version);
4193 if (holder == NULL) {
4194 return NULL; // The version of klass is gone, no method is found
4195 }
4196 Method* method = holder->method_with_orig_idnum(idnum);
4197 return method;
4198 }
4199
4200 #if INCLUDE_JVMTI
4201 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
4202 return _cached_class_file;
4203 }
4204
4205 jint InstanceKlass::get_cached_class_file_len() {
4206 return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
4207 }
4208
4209 unsigned char * InstanceKlass::get_cached_class_file_bytes() {
4210 return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
4211 }
4212 #endif
4213
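// A class can be stored in the CDS archive only if sharing is possible for its loader,
// it is not a hidden class, and its module has not been patched.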
4214 bool InstanceKlass::is_shareable() const {
4215 #if INCLUDE_CDS
4216 ClassLoaderData* loader_data = class_loader_data();
4217 if (!SystemDictionaryShared::is_sharing_possible(loader_data)) {
4218 return false;
4219 }
4220
4221 if (is_hidden()) {
4222 return false;
4223 }
4224
4225 if (module()->is_patched()) {
4226 return false;
4227 }
4228
4229 return true;
4230 #else
4231 return false;
4232 #endif
4233 }
4234
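// If class-list dumping (DumpLoadedClassList) is enabled, append this class's name to
// the list, provided the class is shareable and this is not an exploded build.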
4235 void InstanceKlass::log_to_classlist() const {
4236 #if INCLUDE_CDS
4237 ResourceMark rm;
4238 if (ClassListWriter::is_enabled()) {
4239 if (!ClassLoader::has_jrt_entry()) {
4240 warning("DumpLoadedClassList and CDS are not supported in exploded build");
4241 DumpLoadedClassList = NULL;
4242 return;
4243 }
4244 if (is_shareable()) {
4245 ClassListWriter w;
4246 w.stream()->print_cr("%s", name()->as_C_string());
4247 w.stream()->flush();
4248 }
4249 }
4250 #endif // INCLUDE_CDS
4251 }
4252
4253 // Advance one step in the iteration over the class hierarchy under the root class.
4254 // Skips the subclasses of the current class if requested.
4255 void ClassHierarchyIterator::next() {
4256 assert(_current != NULL, "required");
4257 if (_visit_subclasses && _current->subklass() != NULL) {
4258 _current = _current->subklass();
4259 return; // visit next subclass
4260 }
4261 _visit_subclasses = true; // reset
4262 while (_current->next_sibling() == NULL && _current != _root) {
4263 _current = _current->superklass(); // backtrack; no more sibling subclasses left
4264 }
4265 if (_current == _root) {
4266 // Iteration is over (back at root after backtracking). Invalidate the iterator.
4267 _current = NULL;
4268 return;
4269 }
4270 _current = _current->next_sibling();
4271 return; // visit next sibling subclass
4272 }