24
25 #include "cds/aotLogging.hpp"
26 #include "cds/aotMappedHeap.hpp"
27 #include "cds/aotMappedHeapLoader.inline.hpp"
28 #include "cds/aotMappedHeapWriter.hpp"
29 #include "cds/aotMetaspace.hpp"
30 #include "cds/cdsConfig.hpp"
31 #include "cds/heapShared.inline.hpp"
32 #include "classfile/classLoaderDataShared.hpp"
33 #include "classfile/stringTable.hpp"
34 #include "classfile/systemDictionaryShared.hpp"
35 #include "gc/shared/collectedHeap.hpp"
36 #include "logging/log.hpp"
37 #include "logging/logMessage.hpp"
38 #include "logging/logStream.hpp"
39 #include "logging/logTag.hpp"
40 #include "memory/allocation.inline.hpp"
41 #include "memory/iterator.inline.hpp"
42 #include "memory/resourceArea.hpp"
43 #include "memory/universe.hpp"
44 #include "sanitizers/ub.hpp"
45 #include "utilities/bitMap.inline.hpp"
46 #include "utilities/copy.hpp"
47 #if INCLUDE_G1GC
48 #include "gc/g1/g1CollectedHeap.hpp"
49 #include "gc/g1/g1HeapRegion.hpp"
50 #endif
51
52 #if INCLUDE_CDS_JAVA_HEAP
53
// True when the archived heap region was mapped directly into the runtime heap.
// NOTE(review): set on the mapping path, which is not visible in this chunk — confirm there.
bool AOTMappedHeapLoader::_is_mapped = false;
// True when the archived heap region was instead copied ("loaded") into the
// runtime heap; set after a successful load (see the loading path above).
bool AOTMappedHeapLoader::_is_loaded = false;

// Cached narrow-oop (compressed oops) encoding parameters; the flag guards
// one-time initialization. NOTE(review): whether these describe the dump-time
// or runtime encoding is not visible in this chunk — confirm at the init site.
bool AOTMappedHeapLoader::_narrow_oop_base_initialized = false;
address AOTMappedHeapLoader::_narrow_oop_base;
int AOTMappedHeapLoader::_narrow_oop_shift;

// Support for loaded heap: bounds of the address range that the archived heap
// objects were copied into (consumed by finish_loaded_heap()).
uintptr_t AOTMappedHeapLoader::_loaded_heap_bottom = 0;
uintptr_t AOTMappedHeapLoader::_loaded_heap_top = 0;
343
344 AOTMappedHeapRegion loaded_region;
345 memset(&loaded_region, 0, sizeof(loaded_region));
346
347 MemRegion archive_space;
348 if (!init_loaded_region(mapinfo, &loaded_region, archive_space)) {
349 return false;
350 }
351
352 if (!load_heap_region_impl(mapinfo, &loaded_region, (uintptr_t)archive_space.start())) {
353 assert(_loading_failed, "must be");
354 return false;
355 }
356
357 init_loaded_heap_relocation(&loaded_region);
358 _is_loaded = true;
359
360 return true;
361 }
362
// Returns the root-segment array stored at segment_idx.
// When no archive is in use, this may only be reached while dumping the heap,
// and then only on the VM thread (asserted below).
objArrayOop AOTMappedHeapLoader::root_segment(int segment_idx) {
  if (!CDSConfig::is_using_archive()) {
    assert(CDSConfig::is_dumping_heap() && Thread::current() == (Thread*)VMThread::vm_thread(), "sanity");
  }

  // Resolve the strong OopHandle recorded by add_root_segment().
  objArrayOop segment = (objArrayOop)_root_segments->at(segment_idx).resolve();
  assert(segment != nullptr, "should have been initialized");
  return segment;
}
372
373 void AOTMappedHeapLoader::get_segment_indexes(int idx, int& seg_idx, int& int_idx) {
374 assert(_root_segment_max_size_elems > 0, "sanity");
375
376 // Try to avoid divisions for the common case.
377 if (idx < _root_segment_max_size_elems) {
378 seg_idx = 0;
379 int_idx = idx;
380 } else {
381 seg_idx = idx / _root_segment_max_size_elems;
382 int_idx = idx % _root_segment_max_size_elems;
383 }
384
385 assert(idx == seg_idx * _root_segment_max_size_elems + int_idx,
386 "sanity: %d index maps to %d segment and %d internal", idx, seg_idx, int_idx);
387 }
388
389 void AOTMappedHeapLoader::add_root_segment(objArrayOop segment_oop) {
390 assert(segment_oop != nullptr, "must be");
391 assert(is_in_use(), "must be");
392 if (_root_segments == nullptr) {
393 _root_segments = new GrowableArrayCHeap<OopHandle, mtClassShared>(10);
394 }
395 _root_segments->push(OopHandle(Universe::vm_global(), segment_oop));
396 }
397
// Records the per-segment capacity (in elements) used by get_segment_indexes()
// to translate flat root indexes.
void AOTMappedHeapLoader::init_root_segment_sizes(int max_size_elems) {
  _root_segment_max_size_elems = max_size_elems;
}
401
402 oop AOTMappedHeapLoader::get_root(int index) {
403 assert(!_root_segments->is_empty(), "must have loaded shared heap");
404 int seg_idx, int_idx;
405 get_segment_indexes(index, seg_idx, int_idx);
406 objArrayOop result = objArrayOop(root_segment(seg_idx));
407 return result->obj_at(int_idx);
408 }
409
// Nulls out the archived heap root at the given flat index so it can be
// garbage collected.
void AOTMappedHeapLoader::clear_root(int index) {
  int seg_idx, int_idx;
  get_segment_indexes(index, seg_idx, int_idx);
  root_segment(seg_idx)->obj_at_put(int_idx, nullptr);
}
415
416 class VerifyLoadedHeapEmbeddedPointers: public BasicOopIterateClosure {
417 HashTable<uintptr_t, bool>* _table;
418
419 public:
420 VerifyLoadedHeapEmbeddedPointers(HashTable<uintptr_t, bool>* table) : _table(table) {}
421
422 virtual void do_oop(narrowOop* p) {
423 // This should be called before the loaded region is modified, so all the embedded pointers
424 // must be null, or must point to a valid object in the loaded region.
425 narrowOop v = *p;
426 if (!CompressedOops::is_null(v)) {
444 patch_heap_embedded_pointers(info);
445
446 if (is_loaded()) {
447 // These operations are needed only when the heap is loaded (not mapped).
448 finish_loaded_heap();
449 if (VerifyArchivedFields > 0) {
450 verify_loaded_heap();
451 }
452 }
453 if (is_in_use()) {
454 patch_native_pointers();
455 intptr_t bottom = is_loaded() ? _loaded_heap_bottom : _mapped_heap_bottom;
456
457 // The heap roots are stored in one or more segments that are laid out consecutively.
458 // The size of each segment (except for the last one) is max_size_in_{elems,bytes}.
459 HeapRootSegments segments = FileMapInfo::current_info()->mapped_heap()->root_segments();
460 init_root_segment_sizes(segments.max_size_in_elems());
461 intptr_t first_segment_addr = bottom + segments.base_offset();
462 for (size_t c = 0; c < segments.count(); c++) {
463 oop segment_oop = cast_to_oop(first_segment_addr + (c * segments.max_size_in_bytes()));
464 assert(segment_oop->is_objArray(), "Must be");
465 add_root_segment((objArrayOop)segment_oop);
466 }
467
468 if (CDSConfig::is_dumping_final_static_archive()) {
469 StringTable::move_shared_strings_into_runtime_table();
470 }
471 }
472 }
473
474 void AOTMappedHeapLoader::finish_loaded_heap() {
475 HeapWord* bottom = (HeapWord*)_loaded_heap_bottom;
476 HeapWord* top = (HeapWord*)_loaded_heap_top;
477
478 MemRegion archive_space = MemRegion(bottom, top);
479 Universe::heap()->complete_loaded_archive_space(archive_space);
480 }
481
482 void AOTMappedHeapLoader::verify_loaded_heap() {
483 log_info(aot, heap)("Verify all oops and pointers in loaded heap");
484
485 ResourceMark rm;
|
24
25 #include "cds/aotLogging.hpp"
26 #include "cds/aotMappedHeap.hpp"
27 #include "cds/aotMappedHeapLoader.inline.hpp"
28 #include "cds/aotMappedHeapWriter.hpp"
29 #include "cds/aotMetaspace.hpp"
30 #include "cds/cdsConfig.hpp"
31 #include "cds/heapShared.inline.hpp"
32 #include "classfile/classLoaderDataShared.hpp"
33 #include "classfile/stringTable.hpp"
34 #include "classfile/systemDictionaryShared.hpp"
35 #include "gc/shared/collectedHeap.hpp"
36 #include "logging/log.hpp"
37 #include "logging/logMessage.hpp"
38 #include "logging/logStream.hpp"
39 #include "logging/logTag.hpp"
40 #include "memory/allocation.inline.hpp"
41 #include "memory/iterator.inline.hpp"
42 #include "memory/resourceArea.hpp"
43 #include "memory/universe.hpp"
44 #include "oops/oopCast.inline.hpp"
45 #include "sanitizers/ub.hpp"
46 #include "utilities/bitMap.inline.hpp"
47 #include "utilities/copy.hpp"
48 #if INCLUDE_G1GC
49 #include "gc/g1/g1CollectedHeap.hpp"
50 #include "gc/g1/g1HeapRegion.hpp"
51 #endif
52
53 #if INCLUDE_CDS_JAVA_HEAP
54
// True when the archived heap region was mapped directly into the runtime heap.
// NOTE(review): set on the mapping path, which is not visible in this chunk — confirm there.
bool AOTMappedHeapLoader::_is_mapped = false;
// True when the archived heap region was instead copied ("loaded") into the
// runtime heap; set after a successful load (see the loading path above).
bool AOTMappedHeapLoader::_is_loaded = false;

// Cached narrow-oop (compressed oops) encoding parameters; the flag guards
// one-time initialization. NOTE(review): whether these describe the dump-time
// or runtime encoding is not visible in this chunk — confirm at the init site.
bool AOTMappedHeapLoader::_narrow_oop_base_initialized = false;
address AOTMappedHeapLoader::_narrow_oop_base;
int AOTMappedHeapLoader::_narrow_oop_shift;

// Support for loaded heap: bounds of the address range that the archived heap
// objects were copied into (consumed by finish_loaded_heap()).
uintptr_t AOTMappedHeapLoader::_loaded_heap_bottom = 0;
uintptr_t AOTMappedHeapLoader::_loaded_heap_top = 0;
344
345 AOTMappedHeapRegion loaded_region;
346 memset(&loaded_region, 0, sizeof(loaded_region));
347
348 MemRegion archive_space;
349 if (!init_loaded_region(mapinfo, &loaded_region, archive_space)) {
350 return false;
351 }
352
353 if (!load_heap_region_impl(mapinfo, &loaded_region, (uintptr_t)archive_space.start())) {
354 assert(_loading_failed, "must be");
355 return false;
356 }
357
358 init_loaded_heap_relocation(&loaded_region);
359 _is_loaded = true;
360
361 return true;
362 }
363
// Returns the root-segment array stored at segment_idx.
// When no archive is in use, this may only be reached while dumping the heap,
// and then only on the VM thread (asserted below).
refArrayOop AOTMappedHeapLoader::root_segment(int segment_idx) {
  if (!CDSConfig::is_using_archive()) {
    assert(CDSConfig::is_dumping_heap() && Thread::current() == (Thread*)VMThread::vm_thread(), "sanity");
  }

  // Resolve the strong OopHandle recorded by add_root_segment(), then
  // checked-cast to refArrayOop.
  oop segment = _root_segments->at(segment_idx).resolve();
  assert(segment != nullptr, "should have been initialized");
  return oop_cast<refArrayOop>(segment);
}
373
374 void AOTMappedHeapLoader::get_segment_indexes(int idx, int& seg_idx, int& int_idx) {
375 assert(_root_segment_max_size_elems > 0, "sanity");
376
377 // Try to avoid divisions for the common case.
378 if (idx < _root_segment_max_size_elems) {
379 seg_idx = 0;
380 int_idx = idx;
381 } else {
382 seg_idx = idx / _root_segment_max_size_elems;
383 int_idx = idx % _root_segment_max_size_elems;
384 }
385
386 assert(idx == seg_idx * _root_segment_max_size_elems + int_idx,
387 "sanity: %d index maps to %d segment and %d internal", idx, seg_idx, int_idx);
388 }
389
390 void AOTMappedHeapLoader::add_root_segment(refArrayOop segment_oop) {
391 assert(segment_oop != nullptr, "must be");
392 assert(is_in_use(), "must be");
393 if (_root_segments == nullptr) {
394 _root_segments = new GrowableArrayCHeap<OopHandle, mtClassShared>(10);
395 }
396 _root_segments->push(OopHandle(Universe::vm_global(), segment_oop));
397 }
398
// Records the per-segment capacity (in elements) used by get_segment_indexes()
// to translate flat root indexes.
void AOTMappedHeapLoader::init_root_segment_sizes(int max_size_elems) {
  _root_segment_max_size_elems = max_size_elems;
}
402
403 oop AOTMappedHeapLoader::get_root(int index) {
404 assert(!_root_segments->is_empty(), "must have loaded shared heap");
405 int seg_idx, int_idx;
406 get_segment_indexes(index, seg_idx, int_idx);
407 refArrayOop result = root_segment(seg_idx);
408 return result->obj_at(int_idx);
409 }
410
// Nulls out the archived heap root at the given flat index so it can be
// garbage collected.
void AOTMappedHeapLoader::clear_root(int index) {
  int seg_idx, int_idx;
  get_segment_indexes(index, seg_idx, int_idx);
  root_segment(seg_idx)->obj_at_put(int_idx, nullptr);
}
416
417 class VerifyLoadedHeapEmbeddedPointers: public BasicOopIterateClosure {
418 HashTable<uintptr_t, bool>* _table;
419
420 public:
421 VerifyLoadedHeapEmbeddedPointers(HashTable<uintptr_t, bool>* table) : _table(table) {}
422
423 virtual void do_oop(narrowOop* p) {
424 // This should be called before the loaded region is modified, so all the embedded pointers
425 // must be null, or must point to a valid object in the loaded region.
426 narrowOop v = *p;
427 if (!CompressedOops::is_null(v)) {
445 patch_heap_embedded_pointers(info);
446
447 if (is_loaded()) {
448 // These operations are needed only when the heap is loaded (not mapped).
449 finish_loaded_heap();
450 if (VerifyArchivedFields > 0) {
451 verify_loaded_heap();
452 }
453 }
454 if (is_in_use()) {
455 patch_native_pointers();
456 intptr_t bottom = is_loaded() ? _loaded_heap_bottom : _mapped_heap_bottom;
457
458 // The heap roots are stored in one or more segments that are laid out consecutively.
459 // The size of each segment (except for the last one) is max_size_in_{elems,bytes}.
460 HeapRootSegments segments = FileMapInfo::current_info()->mapped_heap()->root_segments();
461 init_root_segment_sizes(segments.max_size_in_elems());
462 intptr_t first_segment_addr = bottom + segments.base_offset();
463 for (size_t c = 0; c < segments.count(); c++) {
464 oop segment_oop = cast_to_oop(first_segment_addr + (c * segments.max_size_in_bytes()));
465 add_root_segment(oop_cast<refArrayOop>(segment_oop));
466 }
467
468 if (CDSConfig::is_dumping_final_static_archive()) {
469 StringTable::move_shared_strings_into_runtime_table();
470 }
471 }
472 }
473
474 void AOTMappedHeapLoader::finish_loaded_heap() {
475 HeapWord* bottom = (HeapWord*)_loaded_heap_bottom;
476 HeapWord* top = (HeapWord*)_loaded_heap_top;
477
478 MemRegion archive_space = MemRegion(bottom, top);
479 Universe::heap()->complete_loaded_archive_space(archive_space);
480 }
481
482 void AOTMappedHeapLoader::verify_loaded_heap() {
483 log_info(aot, heap)("Verify all oops and pointers in loaded heap");
484
485 ResourceMark rm;
|