/*
 * Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "cds/aotMetaspace.hpp"
#include "cds/archiveHeapLoader.hpp"
#include "cds/cdsConfig.hpp"
#include "cds/dynamicArchive.hpp"
#include "cds/heapShared.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/classLoaderDataShared.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/stringTable.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeBehaviours.hpp"
#include "code/codeCache.hpp"
#include "compiler/oopMap.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/gcArguments.hpp"
#include "gc/shared/gcConfig.hpp"
#include "gc/shared/gcLogPrecious.hpp"
#include "gc/shared/gcTraceTime.inline.hpp"
#include "gc/shared/oopStorageSet.hpp"
#include "gc/shared/plab.hpp"
#include "gc/shared/stringdedup/stringDedup.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/memoryReserver.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/metaspaceClosure.hpp"
#include "memory/metaspaceCounters.hpp"
#include "memory/metaspaceUtils.hpp"
#include "memory/oopFactory.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/compressedOops.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/jmethodIDTable.hpp"
#include "oops/klass.inline.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/objLayout.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/typeArrayKlass.hpp"
#include "prims/resolvedMethodTable.hpp"
#include "runtime/arguments.hpp"
#include "runtime/atomicAccess.hpp"
#include "runtime/cpuTimeCounters.hpp"
#include "runtime/flags/jvmFlagLimit.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/java.hpp"
#include "runtime/javaThread.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/threads.hpp"
#include "runtime/timerTrace.hpp"
#include "sanitizers/leak.hpp"
#include "services/cpuTimeUsage.hpp"
#include "services/memoryService.hpp"
#include "utilities/align.hpp"
#include "utilities/autoRestore.hpp"
#include "utilities/debug.hpp" 89 #include "utilities/formatBuffer.hpp" 90 #include "utilities/globalDefinitions.hpp" 91 #include "utilities/macros.hpp" 92 #include "utilities/ostream.hpp" 93 #include "utilities/preserveException.hpp" 94 95 // A helper class for caching a Method* when the user of the cache 96 // only cares about the latest version of the Method*. This cache safely 97 // interacts with the RedefineClasses API. 98 class LatestMethodCache { 99 // We save the InstanceKlass* and the idnum of Method* in order to get 100 // the current Method*. 101 InstanceKlass* _klass; 102 int _method_idnum; 103 104 public: 105 LatestMethodCache() { _klass = nullptr; _method_idnum = -1; } 106 107 void init(JavaThread* current, InstanceKlass* ik, const char* method, 108 Symbol* signature, bool is_static); 109 Method* get_method(); 110 }; 111 112 static LatestMethodCache _finalizer_register_cache; // Finalizer.register() 113 static LatestMethodCache _loader_addClass_cache; // ClassLoader.addClass() 114 static LatestMethodCache _throw_illegal_access_error_cache; // Unsafe.throwIllegalAccessError() 115 static LatestMethodCache _throw_no_such_method_error_cache; // Unsafe.throwNoSuchMethodError() 116 static LatestMethodCache _do_stack_walk_cache; // AbstractStackWalker.doStackWalk() 117 118 // Known objects 119 TypeArrayKlass* Universe::_typeArrayKlasses[T_LONG+1] = { nullptr /*, nullptr...*/ }; 120 ObjArrayKlass* Universe::_objectArrayKlass = nullptr; 121 Klass* Universe::_fillerArrayKlass = nullptr; 122 OopHandle Universe::_basic_type_mirrors[T_VOID+1]; 123 #if INCLUDE_CDS_JAVA_HEAP 124 int Universe::_archived_basic_type_mirror_indices[T_VOID+1]; 125 #endif 126 127 OopHandle Universe::_main_thread_group; 128 OopHandle Universe::_system_thread_group; 129 OopHandle Universe::_the_empty_class_array; 130 OopHandle Universe::_the_null_string; 131 OopHandle Universe::_the_min_jint_string; 132 133 OopHandle Universe::_the_null_sentinel; 134 135 // _out_of_memory_errors is an objArray 136 enum OutOfMemoryInstance { _oom_java_heap, 137 _oom_c_heap, 138 _oom_metaspace, 139 _oom_class_metaspace, 140 _oom_array_size, 141 _oom_gc_overhead_limit, 142 _oom_realloc_objects, 143 _oom_count }; 144 145 OopHandle Universe::_out_of_memory_errors; 146 OopHandle Universe:: _class_init_stack_overflow_error; 147 OopHandle Universe::_delayed_stack_overflow_error_message; 148 OopHandle Universe::_preallocated_out_of_memory_error_array; 149 volatile jint Universe::_preallocated_out_of_memory_error_avail_count = 0; 150 151 // Message details for OOME objects, preallocate these objects since they could be 152 // used when throwing OOME, we should try to avoid further allocation in such case 153 OopHandle Universe::_msg_metaspace; 154 OopHandle Universe::_msg_class_metaspace; 155 156 OopHandle Universe::_reference_pending_list; 157 158 Array<Klass*>* Universe::_the_array_interfaces_array = nullptr; 159 160 long Universe::verify_flags = Universe::Verify_All; 161 162 Array<int>* Universe::_the_empty_int_array = nullptr; 163 Array<u2>* Universe::_the_empty_short_array = nullptr; 164 Array<Klass*>* Universe::_the_empty_klass_array = nullptr; 165 Array<InstanceKlass*>* Universe::_the_empty_instance_klass_array = nullptr; 166 Array<Method*>* Universe::_the_empty_method_array = nullptr; 167 168 uintx Universe::_the_array_interfaces_bitmap = 0; 169 uintx Universe::_the_empty_klass_bitmap = 0; 170 171 // These variables are guarded by FullGCALot_lock. 
DEBUG_ONLY(OopHandle Universe::_fullgc_alot_dummy_array;)
DEBUG_ONLY(int Universe::_fullgc_alot_dummy_next = 0;)

// Heap
int             Universe::_verify_count = 0;

// Oop verification (see MacroAssembler::verify_oop)
uintptr_t       Universe::_verify_oop_mask = 0;
uintptr_t       Universe::_verify_oop_bits = (uintptr_t) -1;

int             Universe::_base_vtable_size = 0;
bool            Universe::_bootstrapping = false;
bool            Universe::_module_initialized = false;
bool            Universe::_fully_initialized = false;
volatile bool   Universe::_is_shutting_down = false;

OopStorage* Universe::_vm_weak = nullptr;
OopStorage* Universe::_vm_global = nullptr;

CollectedHeap* Universe::_collectedHeap = nullptr;

// These are the exceptions that are always created and are guaranteed to exist.
// If possible, they can be stored as CDS archived objects to speed up AOT code.
class BuiltinException {
  OopHandle _instance;
  CDS_JAVA_HEAP_ONLY(int _archived_root_index;)

 public:
  BuiltinException() : _instance() {
    CDS_JAVA_HEAP_ONLY(_archived_root_index = 0);
  }

  void init_if_empty(Symbol* symbol, TRAPS) {
    if (_instance.is_empty()) {
      Klass* k = SystemDictionary::resolve_or_fail(symbol, true, CHECK);
      oop obj = InstanceKlass::cast(k)->allocate_instance(CHECK);
      _instance = OopHandle(Universe::vm_global(), obj);
    }
  }

  oop instance() {
    return _instance.resolve();
  }

#if INCLUDE_CDS_JAVA_HEAP
  void store_in_cds() {
    _archived_root_index = HeapShared::archive_exception_instance(instance());
  }

  void load_from_cds() {
    if (_archived_root_index >= 0) {
      oop obj = HeapShared::get_root(_archived_root_index);
      assert(obj != nullptr, "must be");
      _instance = OopHandle(Universe::vm_global(), obj);
    }
  }

  void serialize(SerializeClosure *f) {
    f->do_int(&_archived_root_index);
  }
#endif
};

static BuiltinException _null_ptr_exception;
static BuiltinException _arithmetic_exception;
static BuiltinException _internal_error;
static BuiltinException _array_index_out_of_bounds_exception;
static BuiltinException _array_store_exception;
static BuiltinException _class_cast_exception;

objArrayOop Universe::the_empty_class_array() {
  return (objArrayOop)_the_empty_class_array.resolve();
}

oop Universe::main_thread_group() { return _main_thread_group.resolve(); }
void Universe::set_main_thread_group(oop group) { _main_thread_group = OopHandle(vm_global(), group); }

oop Universe::system_thread_group() { return _system_thread_group.resolve(); }
void Universe::set_system_thread_group(oop group) { _system_thread_group = OopHandle(vm_global(), group); }

oop Universe::the_null_string() { return _the_null_string.resolve(); }
oop Universe::the_min_jint_string() { return _the_min_jint_string.resolve(); }

oop Universe::null_ptr_exception_instance() { return _null_ptr_exception.instance(); }
oop Universe::arithmetic_exception_instance() { return _arithmetic_exception.instance(); }
oop Universe::internal_error_instance() { return _internal_error.instance(); }
oop Universe::array_index_out_of_bounds_exception_instance() { return _array_index_out_of_bounds_exception.instance(); }
oop Universe::array_store_exception_instance() { return _array_store_exception.instance(); }
oop Universe::class_cast_exception_instance() { return _class_cast_exception.instance(); }
oop Universe::the_null_sentinel() { return _the_null_sentinel.resolve(); }

oop Universe::int_mirror()    { return check_mirror(_basic_type_mirrors[T_INT].resolve()); }
oop Universe::float_mirror()  { return check_mirror(_basic_type_mirrors[T_FLOAT].resolve()); }
oop Universe::double_mirror() { return check_mirror(_basic_type_mirrors[T_DOUBLE].resolve()); }
oop Universe::byte_mirror()   { return check_mirror(_basic_type_mirrors[T_BYTE].resolve()); }
oop Universe::bool_mirror()   { return check_mirror(_basic_type_mirrors[T_BOOLEAN].resolve()); }
oop Universe::char_mirror()   { return check_mirror(_basic_type_mirrors[T_CHAR].resolve()); }
oop Universe::long_mirror()   { return check_mirror(_basic_type_mirrors[T_LONG].resolve()); }
oop Universe::short_mirror()  { return check_mirror(_basic_type_mirrors[T_SHORT].resolve()); }
oop Universe::void_mirror()   { return check_mirror(_basic_type_mirrors[T_VOID].resolve()); }

oop Universe::java_mirror(BasicType t) {
  assert((uint)t < T_VOID+1, "range check");
  assert(!is_reference_type(t), "sanity");
  return check_mirror(_basic_type_mirrors[t].resolve());
}

void Universe::basic_type_classes_do(KlassClosure *closure) {
  for (int i = T_BOOLEAN; i < T_LONG+1; i++) {
    closure->do_klass(_typeArrayKlasses[i]);
  }
  // We don't do the following because it will confuse JVMTI.
  // _fillerArrayKlass is used only by GC, which doesn't need to see
  // this klass from basic_type_classes_do().
  //
  // closure->do_klass(_fillerArrayKlass);
}

void Universe::metaspace_pointers_do(MetaspaceClosure* it) {
  it->push(&_fillerArrayKlass);
  for (int i = 0; i < T_LONG+1; i++) {
    it->push(&_typeArrayKlasses[i]);
  }
  it->push(&_objectArrayKlass);

  it->push(&_the_empty_int_array);
  it->push(&_the_empty_short_array);
  it->push(&_the_empty_klass_array);
  it->push(&_the_empty_instance_klass_array);
  it->push(&_the_empty_method_array);
  it->push(&_the_array_interfaces_array);
}

#if INCLUDE_CDS_JAVA_HEAP
void Universe::set_archived_basic_type_mirror_index(BasicType t, int index) {
  assert(CDSConfig::is_dumping_heap(), "sanity");
  assert(!is_reference_type(t), "sanity");
  _archived_basic_type_mirror_indices[t] = index;
}

void Universe::archive_exception_instances() {
  _null_ptr_exception.store_in_cds();
  _arithmetic_exception.store_in_cds();
  _internal_error.store_in_cds();
  _array_index_out_of_bounds_exception.store_in_cds();
  _array_store_exception.store_in_cds();
  _class_cast_exception.store_in_cds();
}

void Universe::load_archived_object_instances() {
  if (ArchiveHeapLoader::is_in_use()) {
    for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
      int index = _archived_basic_type_mirror_indices[i];
      if (!is_reference_type((BasicType)i) && index >= 0) {
        oop mirror_oop = HeapShared::get_root(index);
        assert(mirror_oop != nullptr, "must be");
        _basic_type_mirrors[i] = OopHandle(vm_global(), mirror_oop);
      }
    }

    _null_ptr_exception.load_from_cds();
    _arithmetic_exception.load_from_cds();
    _internal_error.load_from_cds();
    _array_index_out_of_bounds_exception.load_from_cds();
    _array_store_exception.load_from_cds();
    _class_cast_exception.load_from_cds();
  }
}
#endif

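// Note: Universe::serialize() below is driven by a SerializeClosure that is
// either a write closure (when dumping the CDS/AOT archive) or a read closure
// (when loading it, see f->reading() in the comment inside), so both paths
// must visit these roots in the same order.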
void Universe::serialize(SerializeClosure* f) {

#if INCLUDE_CDS_JAVA_HEAP
  for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
    f->do_int(&_archived_basic_type_mirror_indices[i]);
    // if f->reading(): We can't call HeapShared::get_root() yet, as the heap
    // contents may need to be relocated. _basic_type_mirrors[i] will be
    // updated later in Universe::load_archived_object_instances().
  }
  _null_ptr_exception.serialize(f);
  _arithmetic_exception.serialize(f);
  _internal_error.serialize(f);
  _array_index_out_of_bounds_exception.serialize(f);
  _array_store_exception.serialize(f);
  _class_cast_exception.serialize(f);
#endif

  f->do_ptr(&_fillerArrayKlass);
  for (int i = 0; i < T_LONG+1; i++) {
    f->do_ptr(&_typeArrayKlasses[i]);
  }

  f->do_ptr(&_objectArrayKlass);
  f->do_ptr(&_the_array_interfaces_array);
  f->do_ptr(&_the_empty_int_array);
  f->do_ptr(&_the_empty_short_array);
  f->do_ptr(&_the_empty_method_array);
  f->do_ptr(&_the_empty_klass_array);
  f->do_ptr(&_the_empty_instance_klass_array);
}


void Universe::check_alignment(uintx size, uintx alignment, const char* name) {
  if (size < alignment || size % alignment != 0) {
    vm_exit_during_initialization(
      err_msg("Size of %s (%zu bytes) must be aligned to %zu bytes", name, size, alignment));
  }
}

static void initialize_basic_type_klass(Klass* k, TRAPS) {
  Klass* ok = vmClasses::Object_klass();
#if INCLUDE_CDS
  if (CDSConfig::is_using_archive()) {
    ClassLoaderData* loader_data = ClassLoaderData::the_null_class_loader_data();
    assert(k->super() == ok, "u3");
    if (k->is_instance_klass()) {
      InstanceKlass::cast(k)->restore_unshareable_info(loader_data, Handle(), nullptr, CHECK);
    } else {
      ArrayKlass::cast(k)->restore_unshareable_info(loader_data, Handle(), CHECK);
    }
  } else
#endif
  {
    k->initialize_supers(ok, nullptr, CHECK);
  }
  k->append_to_sibling_list();
}

void Universe::genesis(TRAPS) {
  ResourceMark rm(THREAD);
  HandleMark   hm(THREAD);

  // Explicit null checks are needed if these offsets are not smaller than the page size
  if (UseCompactObjectHeaders) {
    assert(oopDesc::mark_offset_in_bytes() < static_cast<intptr_t>(os::vm_page_size()),
           "Mark offset is expected to be less than the page size");
  } else {
    assert(oopDesc::klass_offset_in_bytes() < static_cast<intptr_t>(os::vm_page_size()),
           "Klass offset is expected to be less than the page size");
  }
  assert(arrayOopDesc::length_offset_in_bytes() < static_cast<intptr_t>(os::vm_page_size()),
         "Array length offset is expected to be less than the page size");

  { AutoModifyRestore<bool> temporarily(_bootstrapping, true);

    java_lang_Class::allocate_fixup_lists();

    // determine base vtable size; without that we cannot create the array klasses
    compute_base_vtable_size();

    if (!CDSConfig::is_using_archive()) {
      // Initialization of the fillerArrayKlass must come before regular
      // int-TypeArrayKlass so that the int-Array mirror points to the
      // int-TypeArrayKlass.
      _fillerArrayKlass = TypeArrayKlass::create_klass(T_INT, "[Ljdk/internal/vm/FillerElement;", CHECK);
      for (int i = T_BOOLEAN; i < T_LONG+1; i++) {
        _typeArrayKlasses[i] = TypeArrayKlass::create_klass((BasicType)i, CHECK);
      }

      ClassLoaderData* null_cld = ClassLoaderData::the_null_class_loader_data();

      _the_array_interfaces_array     = MetadataFactory::new_array<Klass*>(null_cld, 2, nullptr, CHECK);
      _the_empty_int_array            = MetadataFactory::new_array<int>(null_cld, 0, CHECK);
      _the_empty_short_array          = MetadataFactory::new_array<u2>(null_cld, 0, CHECK);
      _the_empty_method_array         = MetadataFactory::new_array<Method*>(null_cld, 0, CHECK);
      _the_empty_klass_array          = MetadataFactory::new_array<Klass*>(null_cld, 0, CHECK);
      _the_empty_instance_klass_array = MetadataFactory::new_array<InstanceKlass*>(null_cld, 0, CHECK);
    }

    vmSymbols::initialize();

    // Initialize table for matching jmethodID, before SystemDictionary.
    JmethodIDTable::initialize();

    SystemDictionary::initialize(CHECK);

    // Create string constants
    oop s = StringTable::intern("null", CHECK);
    _the_null_string = OopHandle(vm_global(), s);
    s = StringTable::intern("-2147483648", CHECK);
    _the_min_jint_string = OopHandle(vm_global(), s);

#if INCLUDE_CDS
    if (CDSConfig::is_using_archive()) {
      // Verify shared interfaces array.
      assert(_the_array_interfaces_array->at(0) ==
             vmClasses::Cloneable_klass(), "u3");
      assert(_the_array_interfaces_array->at(1) ==
             vmClasses::Serializable_klass(), "u3");
    } else
#endif
    {
      // Set up shared interfaces array. (Do this before supers are set up.)
      _the_array_interfaces_array->at_put(0, vmClasses::Cloneable_klass());
      _the_array_interfaces_array->at_put(1, vmClasses::Serializable_klass());
    }

    _the_array_interfaces_bitmap = Klass::compute_secondary_supers_bitmap(_the_array_interfaces_array);
    _the_empty_klass_bitmap      = Klass::compute_secondary_supers_bitmap(_the_empty_klass_array);

    initialize_basic_type_klass(_fillerArrayKlass, CHECK);

    initialize_basic_type_klass(boolArrayKlass(), CHECK);
    initialize_basic_type_klass(charArrayKlass(), CHECK);
    initialize_basic_type_klass(floatArrayKlass(), CHECK);
    initialize_basic_type_klass(doubleArrayKlass(), CHECK);
    initialize_basic_type_klass(byteArrayKlass(), CHECK);
    initialize_basic_type_klass(shortArrayKlass(), CHECK);
    initialize_basic_type_klass(intArrayKlass(), CHECK);
    initialize_basic_type_klass(longArrayKlass(), CHECK);

    assert(_fillerArrayKlass != intArrayKlass(),
           "Internal filler array klass should be different from the int array klass");
  } // end of core bootstrapping

  {
    Handle tns = java_lang_String::create_from_str("<null_sentinel>", CHECK);
    _the_null_sentinel = OopHandle(vm_global(), tns());
  }

  // Create a handle for reference_pending_list
  _reference_pending_list = OopHandle(vm_global(), nullptr);

  // Maybe this could be lifted up now that object array can be initialized
  // during the bootstrapping.

  // OLD
  // Initialize _objectArrayKlass after core bootstrapping to make
  // sure the super class is set up properly for _objectArrayKlass.
  // ---
  // NEW
  // Since some of the old system object arrays have been converted to
  // ordinary object arrays, _objectArrayKlass will be loaded when
  // SystemDictionary::initialize(CHECK); is run.
  // See the extra check
  // for Object_klass_loaded in objArrayKlassKlass::allocate_objArray_klass_impl.
  {
    Klass* oak = vmClasses::Object_klass()->array_klass(CHECK);
    _objectArrayKlass = ObjArrayKlass::cast(oak);
  }
  // OLD
  // Add the class to the class hierarchy manually to make sure that
  // its vtable is initialized after core bootstrapping is completed.
  // ---
  // NEW
  // Have already been initialized.
  _objectArrayKlass->append_to_sibling_list();

#ifdef ASSERT
  if (FullGCALot) {
    // Allocate an array of dummy objects.
    // We'd like these to be at the bottom of the old generation,
    // so that when we free one and then collect,
    // (almost) the whole heap moves
    // and we find out if we actually update all the oops correctly.
    // But we can't allocate directly in the old generation,
    // so we allocate wherever, and hope that the first collection
    // moves these objects to the bottom of the old generation.
    int size = FullGCALotDummies * 2;

    objArrayOop naked_array = oopFactory::new_objArray(vmClasses::Object_klass(), size, CHECK);
    objArrayHandle dummy_array(THREAD, naked_array);
    int i = 0;
    while (i < size) {
      // Allocate dummy in old generation
      oop dummy = vmClasses::Object_klass()->allocate_instance(CHECK);
      dummy_array->obj_at_put(i++, dummy);
    }
    {
      // Only modify the global variable inside the mutex.
      // If we had a race to here, the other dummy_array instances
      // and their elements just get dropped on the floor, which is fine.
      MutexLocker ml(THREAD, FullGCALot_lock);
      if (_fullgc_alot_dummy_array.is_empty()) {
        _fullgc_alot_dummy_array = OopHandle(vm_global(), dummy_array());
      }
    }
    assert(i == ((objArrayOop)_fullgc_alot_dummy_array.resolve())->length(), "just checking");
  }
#endif
}

void Universe::initialize_basic_type_mirrors(TRAPS) {
#if INCLUDE_CDS_JAVA_HEAP
  if (CDSConfig::is_using_archive() &&
      ArchiveHeapLoader::is_in_use() &&
      _basic_type_mirrors[T_INT].resolve() != nullptr) {
    assert(ArchiveHeapLoader::can_use(), "Sanity");

    // check that all basic type mirrors are mapped also
    for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
      if (!is_reference_type((BasicType)i)) {
        oop m = _basic_type_mirrors[i].resolve();
        assert(m != nullptr, "archived mirrors should not be null");
      }
    }
  } else
    // _basic_type_mirrors[T_INT], etc, are null if the archived heap is not mapped.
#endif
  {
    for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
      BasicType bt = (BasicType)i;
      if (!is_reference_type(bt)) {
        oop m = java_lang_Class::create_basic_type_mirror(type2name(bt), bt, CHECK);
        _basic_type_mirrors[i] = OopHandle(vm_global(), m);
      }
      CDS_JAVA_HEAP_ONLY(_archived_basic_type_mirror_indices[i] = -1);
    }
  }
  if (CDSConfig::is_dumping_heap()) {
    HeapShared::init_scratch_objects_for_basic_type_mirrors(CHECK);
  }
}

void Universe::fixup_mirrors(TRAPS) {
  if (CDSConfig::is_using_aot_linked_classes()) {
    // All mirrors of preloaded classes are already restored. No need to fix up.
    return;
  }

  // Bootstrap problem: all classes get a mirror (java.lang.Class instance) assigned eagerly,
  // but we cannot do that for classes created before java.lang.Class is loaded. Here we simply
  // walk over permanent objects created so far (mostly classes) and fix up their mirrors.
Note 595 // that the number of objects allocated at this point is very small. 596 assert(vmClasses::Class_klass_loaded(), "java.lang.Class should be loaded"); 597 HandleMark hm(THREAD); 598 599 if (!CDSConfig::is_using_archive()) { 600 // Cache the start of the static fields 601 InstanceMirrorKlass::init_offset_of_static_fields(); 602 } 603 604 GrowableArray <Klass*>* list = java_lang_Class::fixup_mirror_list(); 605 int list_length = list->length(); 606 for (int i = 0; i < list_length; i++) { 607 Klass* k = list->at(i); 608 assert(k->is_klass(), "List should only hold classes"); 609 java_lang_Class::fixup_mirror(k, CATCH); 610 } 611 delete java_lang_Class::fixup_mirror_list(); 612 java_lang_Class::set_fixup_mirror_list(nullptr); 613 } 614 615 #define assert_pll_locked(test) \ 616 assert(Heap_lock->test(), "Reference pending list access requires lock") 617 618 #define assert_pll_ownership() assert_pll_locked(owned_by_self) 619 620 oop Universe::reference_pending_list() { 621 if (Thread::current()->is_VM_thread()) { 622 assert_pll_locked(is_locked); 623 } else { 624 assert_pll_ownership(); 625 } 626 return _reference_pending_list.resolve(); 627 } 628 629 void Universe::clear_reference_pending_list() { 630 assert_pll_ownership(); 631 _reference_pending_list.replace(nullptr); 632 } 633 634 bool Universe::has_reference_pending_list() { 635 assert_pll_ownership(); 636 return _reference_pending_list.peek() != nullptr; 637 } 638 639 oop Universe::swap_reference_pending_list(oop list) { 640 assert_pll_locked(is_locked); 641 return _reference_pending_list.xchg(list); 642 } 643 644 #undef assert_pll_locked 645 #undef assert_pll_ownership 646 647 static void reinitialize_vtables() { 648 // The vtables are initialized by starting at java.lang.Object and 649 // initializing through the subclass links, so that the super 650 // classes are always initialized first. 
static void reinitialize_vtables() {
  // The vtables are initialized by starting at java.lang.Object and
  // initializing through the subclass links, so that the super
  // classes are always initialized first.
  for (ClassHierarchyIterator iter(vmClasses::Object_klass()); !iter.done(); iter.next()) {
    Klass* sub = iter.klass();
    sub->vtable().initialize_vtable();
  }
}

static void reinitialize_itables() {

  class ReinitTableClosure : public KlassClosure {
   public:
    void do_klass(Klass* k) {
      if (k->is_instance_klass()) {
        InstanceKlass::cast(k)->itable().initialize_itable();
      }
    }
  };

  MutexLocker mcld(ClassLoaderDataGraph_lock);
  ReinitTableClosure cl;
  ClassLoaderDataGraph::classes_do(&cl);
}

bool Universe::on_page_boundary(void* addr) {
  return is_aligned(addr, os::vm_page_size());
}

// the array of preallocated errors with backtraces
objArrayOop Universe::preallocated_out_of_memory_errors() {
  return (objArrayOop)_preallocated_out_of_memory_error_array.resolve();
}

objArrayOop Universe::out_of_memory_errors() { return (objArrayOop)_out_of_memory_errors.resolve(); }

oop Universe::out_of_memory_error_java_heap() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_java_heap));
}

oop Universe::out_of_memory_error_java_heap_without_backtrace() {
  return out_of_memory_errors()->obj_at(_oom_java_heap);
}

oop Universe::out_of_memory_error_c_heap() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_c_heap));
}

oop Universe::out_of_memory_error_metaspace() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_metaspace));
}

oop Universe::out_of_memory_error_class_metaspace() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_class_metaspace));
}

oop Universe::out_of_memory_error_array_size() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_array_size));
}

oop Universe::out_of_memory_error_gc_overhead_limit() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_gc_overhead_limit));
}

oop Universe::out_of_memory_error_realloc_objects() {
  return gen_out_of_memory_error(out_of_memory_errors()->obj_at(_oom_realloc_objects));
}

oop Universe::class_init_out_of_memory_error()       { return out_of_memory_errors()->obj_at(_oom_java_heap); }
oop Universe::class_init_stack_overflow_error()      { return _class_init_stack_overflow_error.resolve(); }
oop Universe::delayed_stack_overflow_error_message() { return _delayed_stack_overflow_error_message.resolve(); }


bool Universe::should_fill_in_stack_trace(Handle throwable) {
  // never attempt to fill in the stack trace of preallocated errors that do not have
  // a backtrace. These errors are kept alive forever and may be "re-used" when all
  // preallocated errors with a backtrace have been consumed. Also need to avoid
  // a potential loop which could happen if an out-of-memory error occurs when attempting
  // to allocate the backtrace.
  objArrayOop preallocated_oom = out_of_memory_errors();
  for (int i = 0; i < _oom_count; i++) {
    if (throwable() == preallocated_oom->obj_at(i)) {
      return false;
    }
  }
  return true;
}


oop Universe::gen_out_of_memory_error(oop default_err) {
  // generate an out of memory error:
  // - if there is a preallocated error and stack traces are available
  //   (j.l.Throwable is initialized), then return the preallocated
  //   error with a filled in stack trace, and with the message
  //   provided by the default error.
  // - otherwise, return the default error, without a stack trace.
  int next;
  if ((_preallocated_out_of_memory_error_avail_count > 0) &&
      vmClasses::Throwable_klass()->is_initialized()) {
    next = (int)AtomicAccess::add(&_preallocated_out_of_memory_error_avail_count, -1);
    assert(next < (int)PreallocatedOutOfMemoryErrorCount, "avail count is corrupt");
  } else {
    next = -1;
  }
  if (next < 0) {
    // all preallocated errors have been used.
    // return default
    return default_err;
  } else {
    JavaThread* current = JavaThread::current();
    Handle default_err_h(current, default_err);
    // get the error object at the slot and set it to null so that the
    // array isn't keeping it alive anymore.
    Handle exc(current, preallocated_out_of_memory_errors()->obj_at(next));
    assert(exc() != nullptr, "slot has been used already");
    preallocated_out_of_memory_errors()->obj_at_put(next, nullptr);

    // use the message from the default error
    oop msg = java_lang_Throwable::message(default_err_h());
    assert(msg != nullptr, "no message");
    java_lang_Throwable::set_message(exc(), msg);

    // populate the stack trace and return it.
    java_lang_Throwable::fill_in_stack_trace_of_preallocated_backtrace(exc);
    return exc();
  }
}

bool Universe::is_out_of_memory_error_metaspace(oop ex_obj) {
  return java_lang_Throwable::message(ex_obj) == _msg_metaspace.resolve();
}

bool Universe::is_out_of_memory_error_class_metaspace(oop ex_obj) {
  return java_lang_Throwable::message(ex_obj) == _msg_class_metaspace.resolve();
}

// Setup preallocated OutOfMemoryError errors
void Universe::create_preallocated_out_of_memory_errors(TRAPS) {
  InstanceKlass* ik = vmClasses::OutOfMemoryError_klass();
  objArrayOop oa = oopFactory::new_objArray(ik, _oom_count, CHECK);
  objArrayHandle oom_array(THREAD, oa);

  for (int i = 0; i < _oom_count; i++) {
    oop oom_obj = ik->allocate_instance(CHECK);
    oom_array->obj_at_put(i, oom_obj);
  }
  _out_of_memory_errors = OopHandle(vm_global(), oom_array());

  Handle msg = java_lang_String::create_from_str("Java heap space", CHECK);
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_java_heap), msg());

  msg = java_lang_String::create_from_str("C heap space", CHECK);
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_c_heap), msg());

  msg = java_lang_String::create_from_str("Metaspace", CHECK);
  _msg_metaspace = OopHandle(vm_global(), msg());
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_metaspace), msg());

  msg = java_lang_String::create_from_str("Compressed class space", CHECK);
  _msg_class_metaspace = OopHandle(vm_global(), msg());
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_class_metaspace), msg());

  msg = java_lang_String::create_from_str("Requested array size exceeds VM limit", CHECK);
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_array_size), msg());

  msg = java_lang_String::create_from_str("GC overhead limit exceeded", CHECK);
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_gc_overhead_limit), msg());

  msg = java_lang_String::create_from_str("Java heap space: failed reallocation of scalar replaced objects", CHECK);
  java_lang_Throwable::set_message(oom_array->obj_at(_oom_realloc_objects), msg());

  // Setup the array of errors that have preallocated backtrace
  int len = (StackTraceInThrowable) ? (int)PreallocatedOutOfMemoryErrorCount : 0;
  objArrayOop instance = oopFactory::new_objArray(ik, len, CHECK);
  _preallocated_out_of_memory_error_array = OopHandle(vm_global(), instance);
  objArrayHandle preallocated_oom_array(THREAD, instance);

  for (int i = 0; i < len; i++) {
    oop err = ik->allocate_instance(CHECK);
    Handle err_h(THREAD, err);
    java_lang_Throwable::allocate_backtrace(err_h, CHECK);
    preallocated_oom_array->obj_at_put(i, err_h());
  }
  _preallocated_out_of_memory_error_avail_count = (jint)len;
}

intptr_t Universe::_non_oop_bits = 0;

void* Universe::non_oop_word() {
  // Neither the high bits nor the low bits of this value is allowed
  // to look like (respectively) the high or low bits of a real oop.
  //
  // High and low are CPU-specific notions, but low always includes
  // the low-order bit.  Since oops are always aligned at least mod 4,
  // setting the low-order bit will ensure that the low half of the
  // word will never look like that of a real oop.
  //
  // Using the OS-supplied non-memory-address word (usually 0 or -1)
  // will take care of the high bits, however many there are.

  if (_non_oop_bits == 0) {
    _non_oop_bits = (intptr_t)os::non_memory_address_word() | 1;
  }

  return (void*)_non_oop_bits;
}

bool Universe::contains_non_oop_word(void* p) {
  return *(void**)p == non_oop_word();
}

static void initialize_global_behaviours() {
  DefaultICProtectionBehaviour* protection_behavior = new DefaultICProtectionBehaviour();
  // Ignore leak of DefaultICProtectionBehaviour. It is overridden by some GC implementations
  // and the pointer is leaked once.
  LSAN_IGNORE_OBJECT(protection_behavior);
  CompiledICProtectionBehaviour::set_current(protection_behavior);
}

jint universe_init() {
  assert(!Universe::_fully_initialized, "called after initialize_vtables");
  guarantee(1 << LogHeapWordSize == sizeof(HeapWord),
            "LogHeapWordSize is incorrect.");
  guarantee(sizeof(oop) >= sizeof(HeapWord), "HeapWord larger than oop?");
  guarantee(sizeof(oop) % sizeof(HeapWord) == 0,
            "oop size is not a multiple of HeapWord size");

  TraceTime timer("Genesis", TRACETIME_LOG(Info, startuptime));

  initialize_global_behaviours();

  GCLogPrecious::initialize();

  // Initialize CPUTimeCounters object, which must be done before creation of the heap.
  CPUTimeCounters::initialize();

  ObjLayout::initialize();

#ifdef _LP64
  AOTMetaspace::adjust_heap_sizes_for_dumping();
#endif // _LP64

  GCConfig::arguments()->initialize_heap_sizes();

  jint status = Universe::initialize_heap();
  if (status != JNI_OK) {
    return status;
  }

  Universe::initialize_tlab();

  Metaspace::global_initialize();

  // Initialize performance counters for metaspaces
  MetaspaceCounters::initialize_performance_counters();

  // Checks 'AfterMemoryInit' constraints.
  if (!JVMFlagLimit::check_all_constraints(JVMFlagConstraintPhase::AfterMemoryInit)) {
    return JNI_EINVAL;
  }

#if INCLUDE_CDS
  if (CDSConfig::is_using_archive()) {
    // Read the data structures supporting the shared spaces (shared
    // system dictionary, symbol table, etc.)
    AOTMetaspace::initialize_shared_spaces();
  }
#endif

  ClassLoaderData::init_null_class_loader_data();

#if INCLUDE_CDS
#if INCLUDE_CDS_JAVA_HEAP
  if (CDSConfig::is_using_full_module_graph()) {
    ClassLoaderDataShared::restore_archived_entries_for_null_class_loader_data();
  }
#endif // INCLUDE_CDS_JAVA_HEAP
  if (CDSConfig::is_dumping_archive()) {
    CDSConfig::prepare_for_dumping();
  }
#endif

  SymbolTable::create_table();
  StringTable::create_table();

  if (strlen(VerifySubSet) > 0) {
    Universe::initialize_verify_flags();
  }

  ResolvedMethodTable::create_table();

  return JNI_OK;
}

jint Universe::initialize_heap() {
  assert(_collectedHeap == nullptr, "Heap already created");
  _collectedHeap = GCConfig::arguments()->create_heap();

  log_info(gc)("Using %s", _collectedHeap->name());
  return _collectedHeap->initialize();
}

void Universe::initialize_tlab() {
  ThreadLocalAllocBuffer::set_max_size(Universe::heap()->max_tlab_size());
  PLAB::startup_initialization();
  if (UseTLAB) {
    ThreadLocalAllocBuffer::startup_initialization();
  }
}

ReservedHeapSpace Universe::reserve_heap(size_t heap_size, size_t alignment) {

  assert(alignment <= Arguments::conservative_max_heap_alignment(),
         "actual alignment %zu must be within maximum heap alignment %zu",
         alignment, Arguments::conservative_max_heap_alignment());
  assert(is_aligned(heap_size, alignment), "precondition");

  size_t total_reserved = heap_size;
  assert(!UseCompressedOops || (total_reserved <= (OopEncodingHeapMax - os::vm_page_size())),
         "heap size is too big for compressed oops");

  size_t page_size = os::vm_page_size();
  if (UseLargePages && is_aligned(alignment, os::large_page_size())) {
    page_size = os::large_page_size();
  } else {
    // Parallel is the only collector that might opt out of using large pages
    // for the heap.
    assert(!UseLargePages || UseParallelGC, "Wrong alignment to use large pages");
  }

  // Now create the space.
  ReservedHeapSpace rhs = HeapReserver::reserve(total_reserved, alignment, page_size, AllocateHeapAt);

  if (!rhs.is_reserved()) {
    vm_exit_during_initialization(
      err_msg("Could not reserve enough space for %zu KB object heap",
              total_reserved/K));
  }

  assert(total_reserved == rhs.size(), "must be exactly of required size");
  assert(is_aligned(rhs.base(), alignment), "must be exactly of required alignment");

  assert(markWord::encode_pointer_as_mark(rhs.base()).decode_pointer() == rhs.base(),
         "area must be distinguishable from marks for mark-sweep");
  assert(markWord::encode_pointer_as_mark(&rhs.base()[rhs.size()]).decode_pointer() ==
         &rhs.base()[rhs.size()],
         "area must be distinguishable from marks for mark-sweep");

  // We are good.
  if (AllocateHeapAt != nullptr) {
    log_info(gc,heap)("Successfully allocated Java heap at location %s", AllocateHeapAt);
  }

  if (UseCompressedOops) {
    CompressedOops::initialize(rhs);
  }

  Universe::calculate_verify_data((HeapWord*)rhs.base(), (HeapWord*)rhs.end());

  return rhs;
}

OopStorage* Universe::vm_weak() {
  return Universe::_vm_weak;
}

OopStorage* Universe::vm_global() {
  return Universe::_vm_global;
}

void Universe::oopstorage_init() {
  Universe::_vm_global = OopStorageSet::create_strong("VM Global", mtInternal);
  Universe::_vm_weak = OopStorageSet::create_weak("VM Weak", mtInternal);
}

void universe_oopstorage_init() {
  Universe::oopstorage_init();
}

void LatestMethodCache::init(JavaThread* current, InstanceKlass* ik,
                             const char* method, Symbol* signature, bool is_static)
{
  TempNewSymbol name = SymbolTable::new_symbol(method);
  Method* m = nullptr;
  // The klass must be linked before looking up the method.
  if (!ik->link_class_or_fail(current) ||
      ((m = ik->find_method(name, signature)) == nullptr) ||
      is_static != m->is_static()) {
    ResourceMark rm(current);
    // NoSuchMethodException doesn't actually work because it tries to run the
    // <init> function before java_lang_Class is linked. Print error and exit.
    vm_exit_during_initialization(err_msg("Unable to link/verify %s.%s method",
                                          ik->name()->as_C_string(), method));
  }

  _klass = ik;
  _method_idnum = m->method_idnum();
  assert(_method_idnum >= 0, "sanity check");
}

Method* LatestMethodCache::get_method() {
  if (_klass == nullptr) {
    return nullptr;
  } else {
    Method* m = _klass->method_with_idnum(_method_idnum);
    assert(m != nullptr, "sanity check");
    return m;
  }
}

Method* Universe::finalizer_register_method()  { return _finalizer_register_cache.get_method(); }
Method* Universe::loader_addClass_method()     { return _loader_addClass_cache.get_method(); }
Method* Universe::throw_illegal_access_error() { return _throw_illegal_access_error_cache.get_method(); }
Method* Universe::throw_no_such_method_error() { return _throw_no_such_method_error_cache.get_method(); }
Method* Universe::do_stack_walk_method()       { return _do_stack_walk_cache.get_method(); }

void Universe::initialize_known_methods(JavaThread* current) {
  // Set up static method for registering finalizers
  _finalizer_register_cache.init(current,
                          vmClasses::Finalizer_klass(),
                          "register",
                          vmSymbols::object_void_signature(), true);

  _throw_illegal_access_error_cache.init(current,
                          vmClasses::internal_Unsafe_klass(),
                          "throwIllegalAccessError",
                          vmSymbols::void_method_signature(), true);

  _throw_no_such_method_error_cache.init(current,
                          vmClasses::internal_Unsafe_klass(),
                          "throwNoSuchMethodError",
                          vmSymbols::void_method_signature(), true);

  // Set up method for registering loaded classes in class loader vector
  _loader_addClass_cache.init(current,
                          vmClasses::ClassLoader_klass(),
                          "addClass",
                          vmSymbols::class_void_signature(), false);

  // Set up method for stack walking
  _do_stack_walk_cache.init(current,
                          vmClasses::AbstractStackWalker_klass(),
                          "doStackWalk",
                          vmSymbols::doStackWalk_signature(), false);
}

void universe2_init() {
  EXCEPTION_MARK;
  Universe::genesis(CATCH);
}

// Set after initialization of the module runtime, call_initModuleRuntime
void universe_post_module_init() {
  Universe::_module_initialized = true;
}

bool universe_post_init() {
  assert(!is_init_completed(), "Error: initialization not yet completed!");
  Universe::_fully_initialized = true;
  EXCEPTION_MARK;
  if (!CDSConfig::is_using_archive()) {
    reinitialize_vtables();
    reinitialize_itables();
  }

  HandleMark hm(THREAD);
  // Setup preallocated empty java.lang.Class array for Method reflection.

  objArrayOop the_empty_class_array = oopFactory::new_objArray(vmClasses::Class_klass(), 0, CHECK_false);
  Universe::_the_empty_class_array = OopHandle(Universe::vm_global(), the_empty_class_array);

  // Setup preallocated OutOfMemoryError errors
  Universe::create_preallocated_out_of_memory_errors(CHECK_false);

  oop instance;
  // Setup preallocated cause message for delayed StackOverflowError
  if (StackReservedPages > 0) {
    instance = java_lang_String::create_oop_from_str("Delayed StackOverflowError due to ReservedStackAccess annotated method", CHECK_false);
    Universe::_delayed_stack_overflow_error_message = OopHandle(Universe::vm_global(), instance);
  }

  // Setup preallocated exceptions used for a cheap & dirty solution in compiler exception handling
  _null_ptr_exception.init_if_empty(vmSymbols::java_lang_NullPointerException(), CHECK_false);
  _arithmetic_exception.init_if_empty(vmSymbols::java_lang_ArithmeticException(), CHECK_false);
  _array_index_out_of_bounds_exception.init_if_empty(vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), CHECK_false);
  _array_store_exception.init_if_empty(vmSymbols::java_lang_ArrayStoreException(), CHECK_false);
  _class_cast_exception.init_if_empty(vmSymbols::java_lang_ClassCastException(), CHECK_false);

  // Virtual Machine Error for when we get into a situation we can't resolve
  Klass* k = vmClasses::InternalError_klass();
  bool linked = InstanceKlass::cast(k)->link_class_or_fail(CHECK_false);
  if (!linked) {
    tty->print_cr("Unable to link/verify InternalError class");
    return false; // initialization failed
  }
  _internal_error.init_if_empty(vmSymbols::java_lang_InternalError(), CHECK_false);

  Handle msg = java_lang_String::create_from_str("/ by zero", CHECK_false);
  java_lang_Throwable::set_message(Universe::arithmetic_exception_instance(), msg());

  // Setup preallocated StackOverflowError for use with class initialization failure
  k = SystemDictionary::resolve_or_fail(vmSymbols::java_lang_StackOverflowError(), true, CHECK_false);
  instance = InstanceKlass::cast(k)->allocate_instance(CHECK_false);
  Universe::_class_init_stack_overflow_error = OopHandle(Universe::vm_global(), instance);

  Universe::initialize_known_methods(THREAD);

  // This needs to be done before the first scavenge/gc, since
  // it's an input to soft ref clearing policy.
  {
    MutexLocker x(THREAD, Heap_lock);
    Universe::heap()->update_capacity_and_used_at_gc();
  }

  // ("weak") refs processing infrastructure initialization
  Universe::heap()->post_initialize();

  MemoryService::add_metaspace_memory_pools();

  MemoryService::set_universe_heap(Universe::heap());
#if INCLUDE_CDS
  AOTMetaspace::post_initialize(CHECK_false);
#endif
  return true;
}


void Universe::compute_base_vtable_size() {
  _base_vtable_size = ClassLoader::compute_Object_vtable();
}

void Universe::print_on(outputStream* st) {
  GCMutexLocker hl(Heap_lock); // Heap_lock might be locked by caller thread.
  st->print_cr("Heap");

  StreamIndentor si(st, 1);
  heap()->print_heap_on(st);
  MetaspaceUtils::print_on(st);
}

void Universe::print_heap_at_SIGBREAK() {
  if (PrintHeapAtSIGBREAK) {
    print_on(tty);
    tty->cr();
    tty->flush();
  }
}

void Universe::initialize_verify_flags() {
  verify_flags = 0;
  const char delimiter[] = " ,";

  size_t length = strlen(VerifySubSet);
  char* subset_list = NEW_C_HEAP_ARRAY(char, length + 1, mtInternal);
  strncpy(subset_list, VerifySubSet, length + 1);
  char* save_ptr;

  char* token = strtok_r(subset_list, delimiter, &save_ptr);
  while (token != nullptr) {
    if (strcmp(token, "threads") == 0) {
      verify_flags |= Verify_Threads;
    } else if (strcmp(token, "heap") == 0) {
      verify_flags |= Verify_Heap;
    } else if (strcmp(token, "symbol_table") == 0) {
      verify_flags |= Verify_SymbolTable;
    } else if (strcmp(token, "string_table") == 0) {
      verify_flags |= Verify_StringTable;
    } else if (strcmp(token, "codecache") == 0) {
      verify_flags |= Verify_CodeCache;
    } else if (strcmp(token, "dictionary") == 0) {
      verify_flags |= Verify_SystemDictionary;
    } else if (strcmp(token, "classloader_data_graph") == 0) {
      verify_flags |= Verify_ClassLoaderDataGraph;
    } else if (strcmp(token, "metaspace") == 0) {
      verify_flags |= Verify_MetaspaceUtils;
    } else if (strcmp(token, "jni_handles") == 0) {
      verify_flags |= Verify_JNIHandles;
    } else if (strcmp(token, "codecache_oops") == 0) {
      verify_flags |= Verify_CodeCacheOops;
    } else if (strcmp(token, "resolved_method_table") == 0) {
      verify_flags |= Verify_ResolvedMethodTable;
    } else if (strcmp(token, "stringdedup") == 0) {
      verify_flags |= Verify_StringDedup;
    } else {
      vm_exit_during_initialization(err_msg("VerifySubSet: \'%s\' memory sub-system is unknown, please correct it", token));
    }
    token = strtok_r(nullptr, delimiter, &save_ptr);
  }
  FREE_C_HEAP_ARRAY(char, subset_list);
}

bool Universe::should_verify_subset(uint subset) {
  if (verify_flags & subset) {
    return true;
  }
  return false;
}

void Universe::verify(VerifyOption option, const char* prefix) {
  COMPILER2_PRESENT(
    assert(!DerivedPointerTable::is_active(),
           "DPT should not be active during verification "
           "(of thread stacks below)");
  )

  Thread* thread = Thread::current();
  ResourceMark rm(thread);
  HandleMark hm(thread);  // Handles created during verification can be zapped
  _verify_count++;

  FormatBuffer<> title("Verifying %s", prefix);
  GCTraceTime(Info, gc, verify) tm(title.buffer());
  if (should_verify_subset(Verify_Threads)) {
verify)("Threads"); 1263 Threads::verify(); 1264 } 1265 if (should_verify_subset(Verify_Heap)) { 1266 log_debug(gc, verify)("Heap"); 1267 heap()->verify(option); 1268 } 1269 if (should_verify_subset(Verify_SymbolTable)) { 1270 log_debug(gc, verify)("SymbolTable"); 1271 SymbolTable::verify(); 1272 } 1273 if (should_verify_subset(Verify_StringTable)) { 1274 log_debug(gc, verify)("StringTable"); 1275 StringTable::verify(); 1276 } 1277 if (should_verify_subset(Verify_CodeCache)) { 1278 log_debug(gc, verify)("CodeCache"); 1279 CodeCache::verify(); 1280 } 1281 if (should_verify_subset(Verify_SystemDictionary)) { 1282 log_debug(gc, verify)("SystemDictionary"); 1283 SystemDictionary::verify(); 1284 } 1285 if (should_verify_subset(Verify_ClassLoaderDataGraph)) { 1286 log_debug(gc, verify)("ClassLoaderDataGraph"); 1287 ClassLoaderDataGraph::verify(); 1288 } 1289 if (should_verify_subset(Verify_MetaspaceUtils)) { 1290 log_debug(gc, verify)("MetaspaceUtils"); 1291 DEBUG_ONLY(MetaspaceUtils::verify();) 1292 } 1293 if (should_verify_subset(Verify_JNIHandles)) { 1294 log_debug(gc, verify)("JNIHandles"); 1295 JNIHandles::verify(); 1296 } 1297 if (should_verify_subset(Verify_CodeCacheOops)) { 1298 log_debug(gc, verify)("CodeCache Oops"); 1299 CodeCache::verify_oops(); 1300 } 1301 if (should_verify_subset(Verify_ResolvedMethodTable)) { 1302 log_debug(gc, verify)("ResolvedMethodTable Oops"); 1303 ResolvedMethodTable::verify(); 1304 } 1305 if (should_verify_subset(Verify_StringDedup)) { 1306 log_debug(gc, verify)("String Deduplication"); 1307 StringDedup::verify(); 1308 } 1309 } 1310 1311 static void log_cpu_time() { 1312 LogTarget(Info, cpu) cpuLog; 1313 if (!cpuLog.is_enabled()) { 1314 return; 1315 } 1316 1317 const double process_cpu_time = os::elapsed_process_cpu_time(); 1318 if (process_cpu_time == 0 || process_cpu_time == -1) { 1319 // 0 can happen e.g. 
static void log_cpu_time() {
  LogTarget(Info, cpu) cpuLog;
  if (!cpuLog.is_enabled()) {
    return;
  }

  const double process_cpu_time = os::elapsed_process_cpu_time();
  if (process_cpu_time == 0 || process_cpu_time == -1) {
    // 0 can happen e.g. for short running processes with
    // low CPU utilization
    return;
  }

  const double gc_threads_cpu_time = (double) CPUTimeUsage::GC::gc_threads() / NANOSECS_PER_SEC;
  const double gc_vm_thread_cpu_time = (double) CPUTimeUsage::GC::vm_thread() / NANOSECS_PER_SEC;
  const double gc_string_dedup_cpu_time = (double) CPUTimeUsage::GC::stringdedup() / NANOSECS_PER_SEC;
  const double gc_cpu_time = (double) gc_threads_cpu_time + gc_vm_thread_cpu_time + gc_string_dedup_cpu_time;

  const double elapsed_time = os::elapsedTime();
  const bool has_error = CPUTimeUsage::Error::has_error();

  if (gc_cpu_time < process_cpu_time) {
    cpuLog.print("=== CPU time Statistics =============================================================");
    if (has_error) {
      cpuLog.print("WARNING: CPU time sampling reported errors, numbers may be unreliable");
    }
    cpuLog.print(" CPUs");
    cpuLog.print(" s %% utilized");
    cpuLog.print(" Process");
    cpuLog.print(" Total %30.4f %6.2f %8.1f", process_cpu_time, 100.0, process_cpu_time / elapsed_time);
    cpuLog.print(" Garbage Collection %30.4f %6.2f %8.1f", gc_cpu_time, percent_of(gc_cpu_time, process_cpu_time), gc_cpu_time / elapsed_time);
    cpuLog.print(" GC Threads %30.4f %6.2f %8.1f", gc_threads_cpu_time, percent_of(gc_threads_cpu_time, process_cpu_time), gc_threads_cpu_time / elapsed_time);
    cpuLog.print(" VM Thread %30.4f %6.2f %8.1f", gc_vm_thread_cpu_time, percent_of(gc_vm_thread_cpu_time, process_cpu_time), gc_vm_thread_cpu_time / elapsed_time);

    if (UseStringDeduplication) {
      cpuLog.print(" String Deduplication %30.4f %6.2f %8.1f", gc_string_dedup_cpu_time, percent_of(gc_string_dedup_cpu_time, process_cpu_time), gc_string_dedup_cpu_time / elapsed_time);
    }
    cpuLog.print("=====================================================================================");
  }
}

void Universe::before_exit() {
  {
    // Acquire the Heap_lock to synchronize with VM_Heap_Sync_Operations,
    // which may depend on the value of the _is_shutting_down flag.
    MutexLocker hl(Heap_lock);
    log_cpu_time();
    AtomicAccess::release_store(&_is_shutting_down, true);
  }

  heap()->before_exit();

  // Print GC/heap related information.
  Log(gc, exit) log;
  if (log.is_info()) {
    LogStream ls_info(log.info());
    Universe::print_on(&ls_info);
    if (log.is_trace()) {
      LogStream ls_trace(log.trace());
      MutexLocker mcld(ClassLoaderDataGraph_lock);
      ClassLoaderDataGraph::print_on(&ls_trace);
    }
  }
}

#ifndef PRODUCT
void Universe::calculate_verify_data(HeapWord* low_boundary, HeapWord* high_boundary) {
  assert(low_boundary < high_boundary, "bad interval");

  // decide which low-order bits we require to be clear:
  size_t alignSize = MinObjAlignmentInBytes;
  size_t min_object_size = CollectedHeap::min_fill_size();

  // make an inclusive limit:
  uintptr_t max = (uintptr_t)high_boundary - min_object_size*wordSize;
  uintptr_t min = (uintptr_t)low_boundary;
  assert(min < max, "bad interval");
  uintptr_t diff = max ^ min;

  // throw away enough low-order bits to make the diff vanish
  uintptr_t mask = (uintptr_t)(-1);
  while ((mask & diff) != 0)
    mask <<= 1;
  uintptr_t bits = (min & mask);
  assert(bits == (max & mask), "correct mask");
  // check an intermediate value between min and max, just to make sure:
  assert(bits == ((min + (max-min)/2) & mask), "correct mask");

  // require address alignment, too:
  mask |= (alignSize - 1);

  if (!(_verify_oop_mask == 0 && _verify_oop_bits == (uintptr_t)-1)) {
    assert(_verify_oop_mask == mask && _verify_oop_bits == bits, "mask stability");
  }
  _verify_oop_mask = mask;
  _verify_oop_bits = bits;
}

void Universe::set_verify_data(uintptr_t mask, uintptr_t bits) {
  _verify_oop_mask = mask;
  _verify_oop_bits = bits;
}

// Oop verification (see MacroAssembler::verify_oop)

uintptr_t Universe::verify_oop_mask() {
  return _verify_oop_mask;
}

uintptr_t Universe::verify_oop_bits() {
  return _verify_oop_bits;
}

uintptr_t Universe::verify_mark_mask() {
  return markWord::lock_mask_in_place;
}

uintptr_t Universe::verify_mark_bits() {
  intptr_t mask = verify_mark_mask();
  intptr_t bits = (intptr_t)markWord::prototype().value();
  assert((bits & ~mask) == 0, "no stray header bits");
  return bits;
}
#endif // PRODUCT

#ifdef ASSERT
// Release dummy object(s) at bottom of heap
bool Universe::release_fullgc_alot_dummy() {
  MutexLocker ml(FullGCALot_lock);
  objArrayOop fullgc_alot_dummy_array = (objArrayOop)_fullgc_alot_dummy_array.resolve();
  if (fullgc_alot_dummy_array != nullptr) {
    if (_fullgc_alot_dummy_next >= fullgc_alot_dummy_array->length()) {
      // No more dummies to release, release entire array instead
      _fullgc_alot_dummy_array.release(Universe::vm_global());
      _fullgc_alot_dummy_array = OopHandle(); // null out OopStorage pointer.
      return false;
    }

    // Release dummy at bottom of old generation
    fullgc_alot_dummy_array->obj_at_put(_fullgc_alot_dummy_next++, nullptr);
  }
  return true;
}

bool Universe::is_stw_gc_active() {
  return heap()->is_stw_gc_active();
}

bool Universe::is_in_heap(const void* p) {
  return heap()->is_in(p);
}

#endif // ASSERT