  LoadedArchiveHeapRegion loaded_region;
  memset(&loaded_region, 0, sizeof(loaded_region));

  MemRegion archive_space;
  if (!init_loaded_region(mapinfo, &loaded_region, archive_space)) {
    return false;
  }

  if (!load_heap_region_impl(mapinfo, &loaded_region, (uintptr_t)archive_space.start())) {
    assert(_loading_failed, "must be");
    return false;
  }

  init_loaded_heap_relocation(&loaded_region);
  _is_loaded = true;

  return true;
}
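
// The loading sequence above: init_loaded_region() establishes the target range
// (archive_space), load_heap_region_impl() copies the archived objects to that
// address, and init_loaded_heap_relocation() records the relocation state used to
// translate archived heap pointers. A hypothetical call site (the enclosing
// function's name is not shown in this excerpt; load_heap_region is an assumption):
//
//   if (!AOTMappedHeapLoader::load_heap_region(mapinfo)) {
//     // loading failed; the range is later plugged by fill_failed_loaded_heap()
//   }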

objArrayOop AOTMappedHeapLoader::root_segment(int segment_idx) {
  if (CDSConfig::is_dumping_heap() && !CDSConfig::is_dumping_final_static_archive()) {
    assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread");
  } else {
    assert(CDSConfig::is_using_archive(), "must be");
  }

  objArrayOop segment = (objArrayOop)_root_segments->at(segment_idx).resolve();
  assert(segment != nullptr, "should have been initialized");
  return segment;
}
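
// _root_segments stores OopHandles rather than raw oops, so the segment arrays
// stay reachable across GCs that move them; resolve() returns a segment's current
// address. Retrieving a single root might look like this (illustrative only):
//
//   objArrayOop seg = AOTMappedHeapLoader::root_segment(seg_idx);
//   oop root = seg->obj_at(int_idx);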

void AOTMappedHeapLoader::get_segment_indexes(int idx, int& seg_idx, int& int_idx) {
  assert(_root_segment_max_size_elems > 0, "sanity");

  // Try to avoid divisions for the common case.
  if (idx < _root_segment_max_size_elems) {
    seg_idx = 0;
    int_idx = idx;
  } else {
    seg_idx = idx / _root_segment_max_size_elems;
    int_idx = idx % _root_segment_max_size_elems;
  }
}
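
// Worked example (illustrative numbers): with _root_segment_max_size_elems == 1000,
// index 2500 maps to segment 2, slot 500 (2500 / 1000 == 2, 2500 % 1000 == 500),
// while index 42 takes the division-free fast path: segment 0, slot 42.
//
//   int seg_idx, int_idx;
//   get_segment_indexes(2500, seg_idx, int_idx);  // seg_idx == 2, int_idx == 500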

// ... (intervening code elided; the head of the enclosing verification function
// and the first loop's header are not shown) ...
    p += o->size();
  }

  for (HeapWord* p = bottom; p < top; ) {
    oop o = cast_to_oop(p);
    o->oop_iterate(&verifier);
    p += o->size();
  }
}
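
// The loop above steps through the loaded range object by object (o->size()
// advances to the next object) and runs the verifier closure over each object's
// reference fields via oop_iterate(), validating the pointers embedded in the
// loaded objects.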

void AOTMappedHeapLoader::fill_failed_loaded_heap() {
  assert(_loading_failed, "must be");
  if (_loaded_heap_bottom != 0) {
    assert(_loaded_heap_top != 0, "must be");
    HeapWord* bottom = (HeapWord*)_loaded_heap_bottom;
    HeapWord* top = (HeapWord*)_loaded_heap_top;
    Universe::heap()->fill_with_objects(bottom, top - bottom);
  }
}
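
// Plugging the abandoned range with filler objects keeps the heap parseable:
// after a failed load, heap walkers can still step through this region object
// by object. Note that fill_with_objects() takes a size in heap words, which is
// exactly what the HeapWord* difference (top - bottom) yields.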

oop AOTMappedHeapLoader::oop_from_offset(int offset) {
  // Once GC starts, the offsets saved in CachedCodeDirectoryInternal::_permanent_oop_offsets
  // become invalid. There is no convenient predicate for whether GC is currently
  // possible, but GC cannot happen before the Object class is loaded, so assert
  // that we are still that early in start-up.
  assert(CDSConfig::is_using_archive(), "sanity");
  assert(vmClasses::Object_klass()->class_loader_data() == nullptr,
         "can be called only very early during VM start-up");
  if (is_loaded()) {
    return cast_to_oop(_loaded_heap_bottom + offset);
  } else {
    assert(is_mapped(), "must be");
    return cast_to_oop(_mapped_heap_bottom + offset);
  }
}
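
// The offset is a byte offset from the bottom of the archived heap range:
// illustratively, oop_from_offset(0) yields the object at the very start of the
// region, whether it was loaded or mapped.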

class PatchNativePointers: public BitMapClosure {
  Metadata** _start;

 public:
  PatchNativePointers(Metadata** start) : _start(start) {}

  bool do_bit(size_t offset) {
    Metadata** p = _start + offset;
    *p = (Metadata*)(address(*p) + AOTMetaspace::relocation_delta());
    return true;
  }
};
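
// Each set bit in the heap region's pointer bitmap marks one Metadata* slot
// embedded in an archived heap object (for example, a mirror's pointer into the
// metaspace). do_bit() rebases that slot by the same delta the metaspace was
// relocated by, so the slot points at the metadata's actual runtime address.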

void AOTMappedHeapLoader::patch_native_pointers() {
  if (AOTMetaspace::relocation_delta() == 0) {
    return;
  }

  FileMapRegion* r = FileMapInfo::current_info()->region_at(AOTMetaspace::hp);
  if (r->mapped_base() != nullptr && r->has_ptrmap()) {
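    // The excerpt is truncated here. Presumably the elided body iterates the
    // region's pointer bitmap with a PatchNativePointers closure rooted at the
    // mapped base; a minimal sketch, with the bitmap accessor name assumed
    // (not confirmed by this excerpt):
    //
    //   PatchNativePointers patcher((Metadata**)r->mapped_base());
    //   r->ptrmap_view().iterate(&patcher);  // rebase every marked Metadata* slot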