22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/archiveBuilder.hpp"
27 #include "cds/archiveUtils.hpp"
28 #include "cds/cppVtables.hpp"
29 #include "cds/dumpAllocStats.hpp"
30 #include "cds/heapShared.hpp"
31 #include "cds/metaspaceShared.hpp"
32 #include "classfile/classLoaderDataShared.hpp"
33 #include "classfile/symbolTable.hpp"
34 #include "classfile/systemDictionaryShared.hpp"
35 #include "classfile/vmClasses.hpp"
36 #include "interpreter/abstractInterpreter.hpp"
37 #include "logging/log.hpp"
38 #include "logging/logStream.hpp"
39 #include "memory/allStatic.hpp"
40 #include "memory/memRegion.hpp"
41 #include "memory/resourceArea.hpp"
42 #include "oops/instanceKlass.hpp"
43 #include "oops/objArrayKlass.hpp"
44 #include "oops/objArrayOop.inline.hpp"
45 #include "oops/oopHandle.inline.hpp"
46 #include "runtime/arguments.hpp"
47 #include "runtime/globals_extension.hpp"
48 #include "runtime/sharedRuntime.hpp"
49 #include "runtime/thread.hpp"
50 #include "utilities/align.hpp"
51 #include "utilities/bitMap.inline.hpp"
52 #include "utilities/formatBuffer.hpp"
53 #include "utilities/hashtable.inline.hpp"
54
55 ArchiveBuilder* ArchiveBuilder::_current = NULL;
56
// On destruction, record how many bytes were allocated from the ro region
// while this mark was alive (i.e. since _oldtop was captured — presumably at
// construction; see the constructor) as "other"-typed read-only data in the
// dump-time allocation statistics.
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
61
62 ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K) {
  // Remember the owning builder so the callback below can delegate to it.
  GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}

  // Invoked once per unique metaspace object reachable from the roots;
  // forwards to ArchiveBuilder::gather_klass_and_symbol, whose return value
  // tells the closure whether to recurse into this object's embedded pointers.
  virtual bool do_unique_ref(Ref* ref, bool read_only) {
    return _builder->gather_klass_and_symbol(ref, read_only);
  }
};
207
// Gathering-pass callback: record Klasses and Symbols that will be copied
// into the archive, and accumulate a size estimate for all copied objects.
// Returns true if the caller should recurse into the object's embedded
// pointers, false to stop at this reference.
bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
  if (ref->obj() == NULL) {
    return false;
  }
  if (get_follow_mode(ref) != make_a_copy) {
    // Only objects that will be copied into the archive are gathered.
    return false;
  }
  if (ref->msotype() == MetaspaceObj::ClassType) {
    Klass* klass = (Klass*)ref->obj();
    assert(klass->is_klass(), "must be");
    if (!is_excluded(klass)) {
      _klasses->append(klass);
    }
    // Reserve estimate for the extra lookup word placed in front of each
    // archived class. See RunTimeClassInfo::get_for()
    _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);
  } else if (ref->msotype() == MetaspaceObj::SymbolType) {
    // Make sure the symbol won't be GC'ed while we are dumping the archive.
    Symbol* sym = (Symbol*)ref->obj();
    sym->increment_refcount();
    _symbols->append(sym);
  }

  // Account for the object's own (alignment-padded) size in the estimate.
  int bytes = ref->size() * BytesPerWord;
  _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);

  return true; // recurse
}
235
236 void ArchiveBuilder::gather_klasses_and_symbols() {
237 ResourceMark rm;
238 log_info(cds)("Gathering classes and symbols ... ");
239 GatherKlassesAndSymbols doit(this);
240 iterate_roots(&doit, /*is_relocating_pointers=*/false);
241 #if INCLUDE_CDS_JAVA_HEAP
242 if (is_dumping_full_module_graph()) {
599 }
600
601 void ArchiveBuilder::make_shallow_copies(DumpRegion *dump_region,
602 const ArchiveBuilder::SourceObjList* src_objs) {
603 for (int i = 0; i < src_objs->objs()->length(); i++) {
604 make_shallow_copy(dump_region, src_objs->objs()->at(i));
605 }
606 log_info(cds)("done (%d objects)", src_objs->objs()->length());
607 }
608
// Copy the raw bytes of one source object into dump_region and remember the
// new (buffered) address in src_info. Pointer fields inside the copy are
// fixed up later (see relocate_embedded_pointers).
void ArchiveBuilder::make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info) {
  MetaspaceClosure::Ref* ref = src_info->ref();
  address src = ref->obj();
  int bytes = src_info->size_in_bytes();
  char* dest;
  char* oldtop;
  char* newtop;

  oldtop = dump_region->top();
  if (ref->msotype() == MetaspaceObj::ClassType) {
    // Save a pointer immediate in front of an InstanceKlass, so
    // we can do a quick lookup from InstanceKlass* -> RunTimeClassInfo*
    // without building another hashtable. See RunTimeClassInfo::get_for()
    // in systemDictionaryShared.cpp.
    Klass* klass = (Klass*)src;
    if (klass->is_instance_klass()) {
      SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
      dump_region->allocate(sizeof(address));
    }
  }
  dest = dump_region->allocate(bytes);
  newtop = dump_region->top();

  memcpy(dest, src, bytes);

  // If this object type has a C++ vtable, point the copy's first word (the
  // vptr) at the archived vtable and mark that word for relocation.
  intptr_t* archived_vtable = CppVtables::get_archived_vtable(ref->msotype(), (address)dest);
  if (archived_vtable != NULL) {
    *(address*)dest = (address)archived_vtable;
    ArchivePtrMarker::mark_pointer((address*)dest);
  }

  log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(src), p2i(dest), bytes);
  src_info->set_dumped_addr((address)dest);

  // Record everything allocated for this object (including the extra lookup
  // word above, when present) in the dump-time statistics.
  _alloc_stats.record(ref->msotype(), int(newtop - oldtop), src_info->read_only());
}
645
646 address ArchiveBuilder::get_dumped_addr(address src_obj) const {
647 SourceObjInfo* p = _src_obj_table.get(src_obj);
648 assert(p != NULL, "must be");
649
650 return p->dumped_addr();
651 }
652
653 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
654 for (int i = 0; i < src_objs->objs()->length(); i++) {
655 src_objs->relocate(i, this);
656 }
657 }
658
659 void ArchiveBuilder::update_special_refs() {
660 for (int i = 0; i < _special_refs->length(); i++) {
661 SpecialRefInfo s = _special_refs->at(i);
662 size_t field_offset = s.field_offset();
663 address src_obj = s.src_obj();
712 vmClasses::metaspace_pointers_do(&doit);
713 }
714
715 void ArchiveBuilder::make_klasses_shareable() {
716 int num_instance_klasses = 0;
717 int num_boot_klasses = 0;
718 int num_platform_klasses = 0;
719 int num_app_klasses = 0;
720 int num_hidden_klasses = 0;
721 int num_unlinked_klasses = 0;
722 int num_unregistered_klasses = 0;
723 int num_obj_array_klasses = 0;
724 int num_type_array_klasses = 0;
725
726 for (int i = 0; i < klasses()->length(); i++) {
727 const char* type;
728 const char* unlinked = "";
729 const char* hidden = "";
730 Klass* k = klasses()->at(i);
731 k->remove_java_mirror();
732 if (k->is_objArray_klass()) {
733 // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
734 // on their array classes.
735 num_obj_array_klasses ++;
736 type = "array";
737 } else if (k->is_typeArray_klass()) {
738 num_type_array_klasses ++;
739 type = "array";
740 k->remove_unshareable_info();
741 } else {
742 assert(k->is_instance_klass(), " must be");
743 num_instance_klasses ++;
744 InstanceKlass* ik = InstanceKlass::cast(k);
745 if (DynamicDumpSharedSpaces) {
746 // For static dump, class loader type are already set.
747 ik->assign_class_loader_type();
748 }
749 if (ik->is_shared_boot_class()) {
750 type = "boot";
751 num_boot_klasses ++;
798 address requested_p = to_requested(p);
799 assert(requested_p >= _requested_static_archive_bottom, "must be");
800 return requested_p - _requested_static_archive_bottom;
801 }
802
803 uintx ArchiveBuilder::any_to_offset(address p) const {
804 if (is_in_mapped_static_archive(p)) {
805 assert(DynamicDumpSharedSpaces, "must be");
806 return p - _mapped_static_archive_bottom;
807 }
808 return buffer_to_offset(p);
809 }
810
// Update a Java object to point its Klass* to the new location after
// shared archive has been compacted.
void ArchiveBuilder::relocate_klass_ptr(oop o) {
  assert(DumpSharedSpaces, "sanity");
  Klass* k = get_relocated_klass(o->klass());
  // Encode relative to the requested (run-time) base of the static archive,
  // so no further fix-up is needed when the archive maps at that address.
  Klass* requested_k = to_requested(k);
  narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
  o->set_narrow_klass(nk);
}
820
821 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
822 // so that the archive can be mapped to the "requested" location without runtime relocation.
823 //
824 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
825 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
826 // - Every pointer must have one of the following values:
827 // [a] NULL:
828 // No relocation is needed. Remove this pointer from ptrmap so we don't need to
829 // consider it at runtime.
830 // [b] Points into an object X which is inside the buffer:
831 // Adjust this pointer by _buffer_to_requested_delta, so it points to X
832 // when the archive is mapped at the requested location.
833 // [c] Points into an object Y which is inside mapped static archive:
834 // - This happens only during dynamic dump
835 // - Adjust this pointer by _mapped_to_requested_static_archive_delta,
836 // so it points to Y when the static archive is mapped at the requested location.
837 template <bool STATIC_DUMP>
838 class RelocateBufferToRequested : public BitMapClosure {
|
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/archiveBuilder.hpp"
27 #include "cds/archiveUtils.hpp"
28 #include "cds/cppVtables.hpp"
29 #include "cds/dumpAllocStats.hpp"
30 #include "cds/heapShared.hpp"
31 #include "cds/metaspaceShared.hpp"
32 #include "classfile/classLoaderDataShared.hpp"
33 #include "classfile/symbolTable.hpp"
34 #include "classfile/systemDictionaryShared.hpp"
35 #include "classfile/vmClasses.hpp"
36 #include "interpreter/abstractInterpreter.hpp"
37 #include "logging/log.hpp"
38 #include "logging/logStream.hpp"
39 #include "memory/allStatic.hpp"
40 #include "memory/memRegion.hpp"
41 #include "memory/resourceArea.hpp"
42 #include "oops/compressedKlass.inline.hpp"
43 #include "oops/instanceKlass.hpp"
44 #include "oops/klass.inline.hpp"
45 #include "oops/objArrayKlass.hpp"
46 #include "oops/objArrayOop.inline.hpp"
47 #include "oops/oopHandle.inline.hpp"
48 #include "runtime/arguments.hpp"
49 #include "runtime/globals_extension.hpp"
50 #include "runtime/sharedRuntime.hpp"
51 #include "runtime/thread.hpp"
52 #include "utilities/align.hpp"
53 #include "utilities/bitMap.inline.hpp"
54 #include "utilities/formatBuffer.hpp"
55 #include "utilities/hashtable.inline.hpp"
56
57 ArchiveBuilder* ArchiveBuilder::_current = NULL;
58
// On destruction, record how many bytes were allocated from the ro region
// while this mark was alive (i.e. since _oldtop was captured — presumably at
// construction; see the constructor) as "other"-typed read-only data in the
// dump-time allocation statistics.
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
63
64 ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K) {
  // Remember the owning builder so the callback below can delegate to it.
  GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}

  // Invoked once per unique metaspace object reachable from the roots;
  // forwards to ArchiveBuilder::gather_klass_and_symbol, whose return value
  // tells the closure whether to recurse into this object's embedded pointers.
  virtual bool do_unique_ref(Ref* ref, bool read_only) {
    return _builder->gather_klass_and_symbol(ref, read_only);
  }
};
209
// Gathering-pass callback: record Klasses and Symbols that will be copied
// into the archive, and accumulate a size estimate for all copied objects.
// Returns true if the caller should recurse into the object's embedded
// pointers, false to stop at this reference.
bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
  if (ref->obj() == NULL) {
    return false;
  }
  if (get_follow_mode(ref) != make_a_copy) {
    // Only objects that will be copied into the archive are gathered.
    return false;
  }
  if (ref->msotype() == MetaspaceObj::ClassType) {
    Klass* klass = (Klass*)ref->obj();
    assert(klass->is_klass(), "must be");
    if (!is_excluded(klass)) {
      _klasses->append(klass);
    }
    // See ArchiveBuilder::make_shallow_copies: make sure we have enough space for both maximum
    // Klass alignment as well as the RuntimeInfo* pointer we will embed in front of a Klass.
    _estimated_metaspaceobj_bytes += align_up(BytesPerWord, KlassAlignmentInBytes) +
        align_up(sizeof(void*), SharedSpaceObjectAlignment);
  } else if (ref->msotype() == MetaspaceObj::SymbolType) {
    // Make sure the symbol won't be GC'ed while we are dumping the archive.
    Symbol* sym = (Symbol*)ref->obj();
    sym->increment_refcount();
    _symbols->append(sym);
  }

  // Account for the object's own (alignment-padded) size in the estimate.
  int bytes = ref->size() * BytesPerWord;
  _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);

  return true; // recurse
}
239
240 void ArchiveBuilder::gather_klasses_and_symbols() {
241 ResourceMark rm;
242 log_info(cds)("Gathering classes and symbols ... ");
243 GatherKlassesAndSymbols doit(this);
244 iterate_roots(&doit, /*is_relocating_pointers=*/false);
245 #if INCLUDE_CDS_JAVA_HEAP
246 if (is_dumping_full_module_graph()) {
603 }
604
605 void ArchiveBuilder::make_shallow_copies(DumpRegion *dump_region,
606 const ArchiveBuilder::SourceObjList* src_objs) {
607 for (int i = 0; i < src_objs->objs()->length(); i++) {
608 make_shallow_copy(dump_region, src_objs->objs()->at(i));
609 }
610 log_info(cds)("done (%d objects)", src_objs->objs()->length());
611 }
612
// Copy the raw bytes of one source object into dump_region and remember the
// new (buffered) address in src_info. Pointer fields inside the copy are
// fixed up later (see relocate_embedded_pointers).
void ArchiveBuilder::make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info) {
  MetaspaceClosure::Ref* ref = src_info->ref();
  address src = ref->obj();
  int bytes = src_info->size_in_bytes();
  char* dest;
  char* oldtop;
  char* newtop;

  oldtop = dump_region->top();
  if (ref->msotype() == MetaspaceObj::ClassType) {
    // Reserve space for a pointer immediately in front of an InstanceKlass. That space will
    // later be used to store the RuntimeClassInfo* pointer directly in front of the archived
    // InstanceKlass, in order to have a quick lookup InstanceKlass* -> RunTimeClassInfo*
    // without building another hashtable. See RunTimeClassInfo::get_for()/::set_for() for
    // details.
    Klass* klass = (Klass*)src;
    if (klass->is_instance_klass()) {
      SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
      dump_region->allocate(sizeof(address));
    }
    // Klass copies are allocated at KlassAlignmentInBytes, a stricter
    // alignment than other metaspace objects get.
    dest = dump_region->allocate(bytes, KlassAlignmentInBytes);
  } else {
    dest = dump_region->allocate(bytes);
  }
  newtop = dump_region->top();

  memcpy(dest, src, bytes);

  // If this object type has a C++ vtable, point the copy's first word (the
  // vptr) at the archived vtable and mark that word for relocation.
  intptr_t* archived_vtable = CppVtables::get_archived_vtable(ref->msotype(), (address)dest);
  if (archived_vtable != NULL) {
    *(address*)dest = (address)archived_vtable;
    ArchivePtrMarker::mark_pointer((address*)dest);
  }

  log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d (%s)", p2i(src), p2i(dest), bytes,
                 MetaspaceObj::type_name(ref->msotype()));
  src_info->set_dumped_addr((address)dest);

  // Record everything allocated for this object (including the extra lookup
  // word and alignment padding above) in the dump-time statistics.
  _alloc_stats.record(ref->msotype(), int(newtop - oldtop), src_info->read_only());

  // Debug-only cross-check of the recorded totals against actual region usage.
  DEBUG_ONLY(_alloc_stats.verify((int)dump_region->used(), src_info->read_only()));
}
655
656 address ArchiveBuilder::get_dumped_addr(address src_obj) const {
657 SourceObjInfo* p = _src_obj_table.get(src_obj);
658 assert(p != NULL, "must be");
659
660 return p->dumped_addr();
661 }
662
663 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
664 for (int i = 0; i < src_objs->objs()->length(); i++) {
665 src_objs->relocate(i, this);
666 }
667 }
668
669 void ArchiveBuilder::update_special_refs() {
670 for (int i = 0; i < _special_refs->length(); i++) {
671 SpecialRefInfo s = _special_refs->at(i);
672 size_t field_offset = s.field_offset();
673 address src_obj = s.src_obj();
722 vmClasses::metaspace_pointers_do(&doit);
723 }
724
725 void ArchiveBuilder::make_klasses_shareable() {
726 int num_instance_klasses = 0;
727 int num_boot_klasses = 0;
728 int num_platform_klasses = 0;
729 int num_app_klasses = 0;
730 int num_hidden_klasses = 0;
731 int num_unlinked_klasses = 0;
732 int num_unregistered_klasses = 0;
733 int num_obj_array_klasses = 0;
734 int num_type_array_klasses = 0;
735
736 for (int i = 0; i < klasses()->length(); i++) {
737 const char* type;
738 const char* unlinked = "";
739 const char* hidden = "";
740 Klass* k = klasses()->at(i);
741 k->remove_java_mirror();
742 Klass* requested_k = to_requested(k);
743 #ifdef _LP64
744 narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
745 k->set_prototype_header(markWord::prototype().set_narrow_klass(nk));
746 #else
747 k->set_prototype_header(markWord::prototype());
748 #endif
749 if (k->is_objArray_klass()) {
750 // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
751 // on their array classes.
752 num_obj_array_klasses ++;
753 type = "array";
754 } else if (k->is_typeArray_klass()) {
755 num_type_array_klasses ++;
756 type = "array";
757 k->remove_unshareable_info();
758 } else {
759 assert(k->is_instance_klass(), " must be");
760 num_instance_klasses ++;
761 InstanceKlass* ik = InstanceKlass::cast(k);
762 if (DynamicDumpSharedSpaces) {
763 // For static dump, class loader type are already set.
764 ik->assign_class_loader_type();
765 }
766 if (ik->is_shared_boot_class()) {
767 type = "boot";
768 num_boot_klasses ++;
815 address requested_p = to_requested(p);
816 assert(requested_p >= _requested_static_archive_bottom, "must be");
817 return requested_p - _requested_static_archive_bottom;
818 }
819
820 uintx ArchiveBuilder::any_to_offset(address p) const {
821 if (is_in_mapped_static_archive(p)) {
822 assert(DynamicDumpSharedSpaces, "must be");
823 return p - _mapped_static_archive_bottom;
824 }
825 return buffer_to_offset(p);
826 }
827
828 // Update a Java object to point its Klass* to the new location after
829 // shared archive has been compacted.
830 void ArchiveBuilder::relocate_klass_ptr(oop o) {
831 assert(DumpSharedSpaces, "sanity");
832 Klass* k = get_relocated_klass(o->klass());
833 Klass* requested_k = to_requested(k);
834 narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
835 #ifdef _LP64
836 o->set_mark(o->mark().set_narrow_klass(nk));
837 #endif
838 }
839
840 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
841 // so that the archive can be mapped to the "requested" location without runtime relocation.
842 //
843 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
844 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
845 // - Every pointer must have one of the following values:
846 // [a] NULL:
847 // No relocation is needed. Remove this pointer from ptrmap so we don't need to
848 // consider it at runtime.
849 // [b] Points into an object X which is inside the buffer:
850 // Adjust this pointer by _buffer_to_requested_delta, so it points to X
851 // when the archive is mapped at the requested location.
852 // [c] Points into an object Y which is inside mapped static archive:
853 // - This happens only during dynamic dump
854 // - Adjust this pointer by _mapped_to_requested_static_archive_delta,
855 // so it points to Y when the static archive is mapped at the requested location.
856 template <bool STATIC_DUMP>
857 class RelocateBufferToRequested : public BitMapClosure {
|