10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/archiveBuilder.hpp"
27 #include "cds/archiveHeapWriter.hpp"
28 #include "cds/archiveUtils.hpp"
29 #include "cds/cdsConfig.hpp"
30 #include "cds/cppVtables.hpp"
31 #include "cds/dumpAllocStats.hpp"
32 #include "cds/dynamicArchive.hpp"
33 #include "cds/heapShared.hpp"
34 #include "cds/metaspaceShared.hpp"
35 #include "cds/regeneratedClasses.hpp"
36 #include "classfile/classLoaderDataShared.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "classfile/symbolTable.hpp"
39 #include "classfile/systemDictionaryShared.hpp"
40 #include "classfile/vmClasses.hpp"
41 #include "interpreter/abstractInterpreter.hpp"
42 #include "jvm.h"
43 #include "logging/log.hpp"
44 #include "logging/logStream.hpp"
45 #include "memory/allStatic.hpp"
46 #include "memory/memRegion.hpp"
47 #include "memory/resourceArea.hpp"
48 #include "oops/compressedKlass.inline.hpp"
49 #include "oops/instanceKlass.hpp"
50 #include "oops/objArrayKlass.hpp"
51 #include "oops/objArrayOop.inline.hpp"
52 #include "oops/oopHandle.inline.hpp"
53 #include "runtime/arguments.hpp"
54 #include "runtime/fieldDescriptor.inline.hpp"
55 #include "runtime/globals_extension.hpp"
56 #include "runtime/javaThread.hpp"
57 #include "runtime/sharedRuntime.hpp"
58 #include "utilities/align.hpp"
59 #include "utilities/bitMap.inline.hpp"
60 #include "utilities/formatBuffer.hpp"
61
62 ArchiveBuilder* ArchiveBuilder::_current = nullptr;
63
// On destruction, record how many bytes were allocated into the RO region
// while this mark was alive (delta between the current RO top and the top
// captured at construction) into the dump-time allocation statistics.
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
68
// A SourceObjList collects the metaspace objects destined for one dump region
// (rw or ro), plus a bitmap of the embedded pointers inside those objects.
ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
  _total_bytes = 0;
  // Pre-sized generously: a typical dump gathers a large number of source objects.
  _objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
}
143
144 RelocateEmbeddedPointers relocator(builder, src_info->buffered_addr(), start);
145 _ptrmap.iterate(&relocator, start, end);
146 }
147
// The ArchiveBuilder is a singleton (_current). The constructor only sets up
// empty regions/tables; the actual dump buffer is reserved later in
// reserve_buffer().
ArchiveBuilder::ArchiveBuilder() :
  _current_dump_region(nullptr),
  _buffer_bottom(nullptr),
  _last_verified_top(nullptr),
  _num_dump_regions_used(0),
  _other_region_used_bytes(0),
  _requested_static_archive_bottom(nullptr),
  _requested_static_archive_top(nullptr),
  _requested_dynamic_archive_bottom(nullptr),
  _requested_dynamic_archive_top(nullptr),
  _mapped_static_archive_bottom(nullptr),
  _mapped_static_archive_top(nullptr),
  _buffer_to_requested_delta(0),
  _rw_region("rw", MAX_SHARED_DELTA),
  _ro_region("ro", MAX_SHARED_DELTA),
  _ptrmap(mtClassShared),
  _rw_ptrmap(mtClassShared),
  _ro_ptrmap(mtClassShared),
  _rw_src_objs(),
  _ro_src_objs(),
  _src_obj_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _buffered_to_src_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _total_heap_region_size(0),
  _estimated_metaspaceobj_bytes(0),
  _estimated_hashtable_bytes(0)
{
  _klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
  _symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
  // Fixed seed so that the "entropy" used for archive layout decisions is
  // deterministic across dumps.
  _entropy_seed = 0x12345678;
  assert(_current == nullptr, "must be");
  _current = this;
}
180
181 ArchiveBuilder::~ArchiveBuilder() {
182 assert(_current == this, "must be");
183 _current = nullptr;
184
185 for (int i = 0; i < _symbols->length(); i++) {
209 public:
210 GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}
211
212 virtual bool do_unique_ref(Ref* ref, bool read_only) {
213 return _builder->gather_klass_and_symbol(ref, read_only);
214 }
215 };
216
// Called for each unique metaspace object reachable from the roots.
// Collects Klasses (skipping excluded classes) and Symbols into _klasses
// and _symbols, and accumulates a size estimate for the dump buffer.
// Returns true to make the caller recurse into the object's embedded refs.
bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
  if (ref->obj() == nullptr) {
    return false;
  }
  if (get_follow_mode(ref) != make_a_copy) {
    // Objects that are not copied (point_to_it/set_to_null) contribute nothing
    // to the estimate and need not be recursed into here.
    return false;
  }
  if (ref->msotype() == MetaspaceObj::ClassType) {
    Klass* klass = (Klass*)ref->obj();
    assert(klass->is_klass(), "must be");
    if (!is_excluded(klass)) {
      _klasses->append(klass);
    }
    // See RunTimeClassInfo::get_for()
    _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);
  } else if (ref->msotype() == MetaspaceObj::SymbolType) {
    // Make sure the symbol won't be GC'ed while we are dumping the archive.
    Symbol* sym = (Symbol*)ref->obj();
    sym->increment_refcount();
    _symbols->append(sym);
  }

  int bytes = ref->size() * BytesPerWord;
  _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);

  return true; // recurse
}
244
245 void ArchiveBuilder::gather_klasses_and_symbols() {
246 ResourceMark rm;
247 log_info(cds)("Gathering classes and symbols ... ");
248 GatherKlassesAndSymbols doit(this);
267 // During -Xshare:dump, the order of Symbol creation is strictly determined by
268 // the SharedClassListFile (class loading is done in a single thread and the JIT
269 // is disabled). Also, Symbols are allocated in monotonically increasing addresses
270 // (see Symbol::operator new(size_t, int)). So if we iterate the Symbols by
271 // ascending address order, we ensure that all Symbols are copied into deterministic
272 // locations in the archive.
273 //
274 // TODO: in the future, if we want to produce deterministic contents in the
275 // dynamic archive, we might need to sort the symbols alphabetically (also see
276 // DynamicArchiveBuilder::sort_methods()).
277 log_info(cds)("Sorting symbols ... ");
278 _symbols->sort(compare_symbols_by_address);
279 sort_klasses();
280
281 // TODO -- we need a proper estimate for the archived modules, etc,
282 // but this should be enough for now
283 _estimated_metaspaceobj_bytes += 200 * 1024 * 1024;
284 }
285 }
286
287 int ArchiveBuilder::compare_symbols_by_address(Symbol** a, Symbol** b) {
288 if (a[0] < b[0]) {
289 return -1;
290 } else {
291 assert(a[0] > b[0], "Duplicated symbol %s unexpected", (*a)->as_C_string());
292 return 1;
293 }
294 }
295
296 int ArchiveBuilder::compare_klass_by_name(Klass** a, Klass** b) {
297 return a[0]->name()->fast_compare(b[0]->name());
298 }
299
// Sort _klasses by class name so that classes are laid out in a
// deterministic order in the archive.
void ArchiveBuilder::sort_klasses() {
  log_info(cds)("Sorting classes ... ");
  _klasses->sort(compare_klass_by_name);
}
304
// Estimate an upper bound on the total archive size, used to reserve the
// dump-time buffer. The estimate only needs to be big enough; unused space
// is trimmed when the regions are packed.
size_t ArchiveBuilder::estimate_archive_size() {
  // size of the symbol table and two dictionaries, plus the RunTimeClassInfo's
  size_t symbol_table_est = SymbolTable::estimate_size_for_archive();
  size_t dictionary_est = SystemDictionaryShared::estimate_size_for_archive();
  _estimated_hashtable_bytes = symbol_table_est + dictionary_est;

  size_t total = 0;

  total += _estimated_metaspaceobj_bytes;
  total += _estimated_hashtable_bytes;

  // allow fragmentation at the end of each dump region
  total += _total_dump_regions * MetaspaceShared::core_region_alignment();

  log_info(cds)("_estimated_hashtable_bytes = " SIZE_FORMAT " + " SIZE_FORMAT " = " SIZE_FORMAT,
                symbol_table_est, dictionary_est, _estimated_hashtable_bytes);
  log_info(cds)("_estimated_metaspaceobj_bytes = " SIZE_FORMAT, _estimated_metaspaceobj_bytes);
  log_info(cds)("total estimate bytes = " SIZE_FORMAT, total);

  return align_up(total, MetaspaceShared::core_region_alignment());
}
326
327 address ArchiveBuilder::reserve_buffer() {
328 size_t buffer_size = estimate_archive_size();
329 ReservedSpace rs(buffer_size, MetaspaceShared::core_region_alignment(), os::vm_page_size());
330 if (!rs.is_reserved()) {
404
405 iterate_roots(it);
406 }
407
// MetaspaceClosure that forwards every reference to
// ArchiveBuilder::gather_one_source_obj(), which decides whether the
// referenced object should be copied into the archive buffer.
class GatherSortedSourceObjs : public MetaspaceClosure {
  ArchiveBuilder* _builder;

public:
  GatherSortedSourceObjs(ArchiveBuilder* builder) : _builder(builder) {}

  virtual bool do_ref(Ref* ref, bool read_only) {
    return _builder->gather_one_source_obj(ref, read_only);
  }
};
418
419 bool ArchiveBuilder::gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only) {
420 address src_obj = ref->obj();
421 if (src_obj == nullptr) {
422 return false;
423 }
424 if (RegeneratedClasses::has_been_regenerated(src_obj)) {
425 // No need to copy it. We will later relocate it to point to the regenerated klass/method.
426 return false;
427 }
428 remember_embedded_pointer_in_enclosing_obj(ref);
429
430 FollowMode follow_mode = get_follow_mode(ref);
431 SourceObjInfo src_info(ref, read_only, follow_mode);
432 bool created;
433 SourceObjInfo* p = _src_obj_table.put_if_absent(src_obj, src_info, &created);
434 if (created) {
435 if (_src_obj_table.maybe_grow()) {
436 log_info(cds, hashtables)("Expanded _src_obj_table table to %d", _src_obj_table.table_size());
437 }
438 }
439
440 #ifdef ASSERT
441 if (ref->msotype() == MetaspaceObj::MethodType) {
442 Method* m = (Method*)ref->obj();
443 assert(!RegeneratedClasses::has_been_regenerated((address)m->method_holder()),
444 "Should not archive methods in a class that has been regenerated");
445 }
446 #endif
447
448 assert(p->read_only() == src_info.read_only(), "must be");
508 _rw_src_objs.remember_embedded_pointer(src_info, ref);
509 }
510 }
511 }
512
513 void ArchiveBuilder::gather_source_objs() {
514 ResourceMark rm;
515 log_info(cds)("Gathering all archivable objects ... ");
516 gather_klasses_and_symbols();
517 GatherSortedSourceObjs doit(this);
518 iterate_sorted_roots(&doit);
519 doit.finish();
520 }
521
522 bool ArchiveBuilder::is_excluded(Klass* klass) {
523 if (klass->is_instance_klass()) {
524 InstanceKlass* ik = InstanceKlass::cast(klass);
525 return SystemDictionaryShared::is_excluded_class(ik);
526 } else if (klass->is_objArray_klass()) {
527 Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
528 if (MetaspaceShared::is_shared_static(bottom)) {
529 // The bottom class is in the static archive so it's clearly not excluded.
530 assert(CDSConfig::is_dumping_dynamic_archive(), "sanity");
531 return false;
532 } else if (bottom->is_instance_klass()) {
533 return SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(bottom));
534 }
535 }
536
537 return false;
538 }
539
// Decide how a discovered reference is handled during the dump:
//  - point_to_it: keep pointing at an existing (already shared) copy
//  - set_to_null: the referenced object is not archived; clear the pointer
//  - make_a_copy: the object will be copied into the archive buffer
ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
  address obj = ref->obj();
  if (MetaspaceShared::is_in_shared_metaspace(obj)) {
    // Don't dump existing shared metadata again.
    return point_to_it;
  } else if (ref->msotype() == MetaspaceObj::MethodDataType ||
             ref->msotype() == MetaspaceObj::MethodCountersType) {
    // Profiling data is not archived.
    return set_to_null;
  } else {
    if (ref->msotype() == MetaspaceObj::ClassType) {
      Klass* klass = (Klass*)ref->obj();
      assert(klass->is_klass(), "must be");
      if (is_excluded(klass)) {
        ResourceMark rm;
        log_debug(cds, dynamic)("Skipping class (excluded): %s", klass->external_name());
        return set_to_null;
      }
    }

    return make_a_copy;
  }
}
562
563 void ArchiveBuilder::start_dump_region(DumpRegion* next) {
564 address bottom = _last_verified_top;
565 address top = (address)(current_dump_region()->top());
566 _other_region_used_bytes += size_t(top - bottom);
567
716 ArchivePtrMarker::mark_pointer(ptr_location);
717 }
718 }
719
// Redirect *ptr_location to the buffered copy of its target (unless the
// target is already inside the mapped static archive), then record the
// pointer location in the ptrmap so it can be relocated again at runtime.
void ArchiveBuilder::mark_and_relocate_to_buffered_addr(address* ptr_location) {
  assert(*ptr_location != nullptr, "sanity");
  if (!is_in_mapped_static_archive(*ptr_location)) {
    *ptr_location = get_buffered_addr(*ptr_location);
  }
  ArchivePtrMarker::mark_pointer(ptr_location);
}
727
728 address ArchiveBuilder::get_buffered_addr(address src_addr) const {
729 SourceObjInfo* p = _src_obj_table.get(src_addr);
730 assert(p != nullptr, "src_addr " INTPTR_FORMAT " is used but has not been archived",
731 p2i(src_addr));
732
733 return p->buffered_addr();
734 }
735
736 address ArchiveBuilder::get_source_addr(address buffered_addr) const {
737 assert(is_in_buffer_space(buffered_addr), "must be");
738 address* src_p = _buffered_to_src_table.get(buffered_addr);
739 assert(src_p != nullptr && *src_p != nullptr, "must be");
740 return *src_p;
741 }
742
743 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
744 for (int i = 0; i < src_objs->objs()->length(); i++) {
745 src_objs->relocate(i, this);
746 }
747 }
748
// After all objects have been copied into the buffer, fix up the pointers
// embedded in each copied object (both rw and ro lists).
void ArchiveBuilder::relocate_metaspaceobj_embedded_pointers() {
  log_info(cds)("Relocating embedded pointers in core regions ... ");
  relocate_embedded_pointers(&_rw_src_objs);
  relocate_embedded_pointers(&_ro_src_objs);
}
754
// Strip dump-time-only state from the buffered copy of every gathered class
// so the copies are safe to map and share at runtime, counting each class
// category along the way for the summary log below.
void ArchiveBuilder::make_klasses_shareable() {
  int num_instance_klasses = 0;
  int num_boot_klasses = 0;
  int num_platform_klasses = 0;
  int num_app_klasses = 0;
  int num_hidden_klasses = 0;
  int num_unlinked_klasses = 0;
  int num_unregistered_klasses = 0;
  int num_obj_array_klasses = 0;
  int num_type_array_klasses = 0;

  for (int i = 0; i < klasses()->length(); i++) {
    // Some of the code in ConstantPool::remove_unshareable_info() requires the classes
    // to be in linked state, so it must be called here before the next loop, which returns
    // all classes to unlinked state.
    Klass* k = get_buffered_addr(klasses()->at(i));
    if (k->is_instance_klass()) {
      InstanceKlass::cast(k)->constants()->remove_unshareable_info();
    }
  }

  for (int i = 0; i < klasses()->length(); i++) {
    const char* type;
    const char* unlinked = "";
    const char* hidden = "";
    const char* generated = "";
    Klass* k = get_buffered_addr(klasses()->at(i));
    k->remove_java_mirror();
    if (k->is_objArray_klass()) {
      // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
      // on their array classes.
      num_obj_array_klasses ++;
      type = "array";
    } else if (k->is_typeArray_klass()) {
      num_type_array_klasses ++;
      type = "array";
      k->remove_unshareable_info();
    } else {
      assert(k->is_instance_klass(), " must be");
      num_instance_klasses ++;
      InstanceKlass* ik = InstanceKlass::cast(k);
      if (ik->is_shared_boot_class()) {
        type = "boot";
        num_boot_klasses ++;
      } else if (ik->is_shared_platform_class()) {
        type = "plat";
        num_platform_klasses ++;
      } else if (ik->is_shared_app_class()) {
        type = "app";
        num_app_klasses ++;
      } else {
        assert(ik->is_shared_unregistered_class(), "must be");
        type = "unreg";
        num_unregistered_klasses ++;
      }

      if (!ik->is_linked()) {
        num_unlinked_klasses ++;
        unlinked = " ** unlinked";
      }

      if (ik->is_hidden()) {
        num_hidden_klasses ++;
        hidden = " ** hidden";
      }

      if (ik->is_generated_shared_class()) {
        generated = " ** generated";
      }
      MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread::current(), ik);
      ik->remove_unshareable_info();
    }

    if (log_is_enabled(Debug, cds, class)) {
      ResourceMark rm;
      log_debug(cds, class)("klasses[%5d] = " PTR_FORMAT " %-5s %s%s%s%s", i,
                            p2i(to_requested(k)), type, k->external_name(),
                            hidden, unlinked, generated);
    }
  }

  log_info(cds)("Number of classes %d", num_instance_klasses + num_obj_array_klasses + num_type_array_klasses);
  log_info(cds)("    instance classes   = %5d", num_instance_klasses);
  log_info(cds)("      boot             = %5d", num_boot_klasses);
  log_info(cds)("      app              = %5d", num_app_klasses);
  log_info(cds)("      platform         = %5d", num_platform_klasses);
  log_info(cds)("      unregistered     = %5d", num_unregistered_klasses);
  log_info(cds)("      (hidden)         = %5d", num_hidden_klasses);
  log_info(cds)("      (unlinked)       = %5d", num_unlinked_klasses);
  log_info(cds)("    obj array classes  = %5d", num_obj_array_klasses);
  log_info(cds)("    type array classes = %5d", num_type_array_klasses);
  log_info(cds)("               symbols = %5d", _symbols->length());

  DynamicArchive::make_array_klasses_shareable();
}
850
// Serialize (or deserialize, depending on the closure's direction) the table
// headers that back the dynamically archivable items.
void ArchiveBuilder::serialize_dynamic_archivable_items(SerializeClosure* soc) {
  SymbolTable::serialize_shared_table_header(soc, false);
  SystemDictionaryShared::serialize_dictionary_headers(soc, false);
  DynamicArchive::serialize_array_klasses(soc);
}
856
// Offset of buffered address <p>, relative to the requested bottom of the
// static archive (the form in which pointers are stored in the archive).
uintx ArchiveBuilder::buffer_to_offset(address p) const {
  address requested_p = to_requested(p);
  assert(requested_p >= _requested_static_archive_bottom, "must be");
  return requested_p - _requested_static_archive_bottom;
}
862
// Like buffer_to_offset(), but <p> may be a mapped-static-archive address,
// a buffered address, or a "source" address (which is first translated to
// its buffered copy).
uintx ArchiveBuilder::any_to_offset(address p) const {
  if (is_in_mapped_static_archive(p)) {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    return p - _mapped_static_archive_bottom;
  }
  if (!is_in_buffer_space(p)) {
    // p must be a "source" address
    p = get_buffered_addr(p);
  }
  return buffer_to_offset(p);
}
874
875 #if INCLUDE_CDS_JAVA_HEAP
// Compute the narrowKlass value that will represent <k> at runtime, using
// the encoding base/shift the archived heap objects were written with.
narrowKlass ArchiveBuilder::get_requested_narrow_klass(Klass* k) {
  assert(CDSConfig::is_dumping_heap(), "sanity");
  k = get_buffered_klass(k);
  Klass* requested_k = to_requested(k);
  address narrow_klass_base = _requested_static_archive_bottom; // runtime encoding base == runtime mapping start
  const int narrow_klass_shift = ArchiveHeapWriter::precomputed_narrow_klass_shift;
  return CompressedKlassPointers::encode_not_null(requested_k, narrow_klass_base, narrow_klass_shift);
}
884 #endif // INCLUDE_CDS_JAVA_HEAP
885
886 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
887 // so that the archive can be mapped to the "requested" location without runtime relocation.
888 //
889 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
890 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
891 // - Every pointer must have one of the following values:
892 // [a] nullptr:
893 // No relocation is needed. Remove this pointer from ptrmap so we don't need to
894 // consider it at runtime.
944 } else {
945 assert(_builder->is_in_mapped_static_archive(*p), "old pointer must point inside buffer space or mapped static archive");
946 *p += _mapped_to_requested_static_archive_delta;
947 assert(_builder->is_in_requested_static_archive(*p), "new pointer must point inside requested archive");
948 }
949 }
950 _max_non_null_offset = offset;
951 }
952
953 return true; // keep iterating
954 }
955
  // Relocate every marked pointer, then trim the ptrmap down to the last
  // non-null entry so the bitmap written to the archive is minimal.
  void doit() {
    ArchivePtrMarker::ptrmap()->iterate(this);
    ArchivePtrMarker::compact(_max_non_null_offset);
  }
960 };
961
962
// Patch all marked pointers so their values are correct when the archive is
// mapped at the requested base address (no runtime relocation needed then).
void ArchiveBuilder::relocate_to_requested() {
  ro_region()->pack();

  size_t my_archive_size = buffer_top() - buffer_bottom();

  if (CDSConfig::is_dumping_static_archive()) {
    _requested_static_archive_top = _requested_static_archive_bottom + my_archive_size;
    RelocateBufferToRequested<true> patcher(this);
    patcher.doit();
  } else {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    _requested_dynamic_archive_top = _requested_dynamic_archive_bottom + my_archive_size;
    RelocateBufferToRequested<false> patcher(this);
    patcher.doit();
  }
}
979
980 // Write detailed info to a mapfile to analyze contents of the archive.
981 // static dump:
982 // java -Xshare:dump -Xlog:cds+map=trace:file=cds.map:none:filesize=0
983 // dynamic dump:
984 // java -cp MyApp.jar -XX:ArchiveClassesAtExit=MyApp.jsa \
1092 base = requested_base;
1093 top = requested_base + size;
1094 log_info(cds, map)("[%-18s " PTR_FORMAT " - " PTR_FORMAT " " SIZE_FORMAT_W(9) " bytes]",
1095 name, p2i(base), p2i(top), size);
1096 }
1097
1098 #if INCLUDE_CDS_JAVA_HEAP
  // Log the contents of the archived heap region: one line per archived oop
  // (plus the special heap-roots segment and any filler oops), each followed
  // by a hex dump and, at trace level, per-field details.
  static void log_heap_region(ArchiveHeapInfo* heap_info) {
    MemRegion r = heap_info->buffer_region();
    address start = address(r.start()); // start of the current oop inside the buffer
    address end = address(r.end());
    log_region("heap", start, end, ArchiveHeapWriter::buffered_addr_to_requested_addr(start));

    LogStreamHandle(Info, cds, map) st;

    while (start < end) {
      size_t byte_size;
      oop source_oop = ArchiveHeapWriter::buffered_addr_to_source_obj(start);
      address requested_start = ArchiveHeapWriter::buffered_addr_to_requested_addr(start);
      st.print(PTR_FORMAT ": @@ Object ", p2i(requested_start));

      if (source_oop != nullptr) {
        // This is a regular oop that got archived.
        print_oop_with_requested_addr_cr(&st, source_oop, false);
        byte_size = source_oop->size() * BytesPerWord;
      } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
        // HeapShared::roots() is copied specially, so it doesn't exist in
        // ArchiveHeapWriter::BufferOffsetToSourceObjectTable.
        // See ArchiveHeapWriter::copy_roots_to_buffer().
        st.print_cr("HeapShared::roots[%d]", HeapShared::pending_roots()->length());
        byte_size = ArchiveHeapWriter::heap_roots_word_size() * BytesPerWord;
      } else if ((byte_size = ArchiveHeapWriter::get_filler_size_at(start)) > 0) {
        // We have a filler oop, which also does not exist in BufferOffsetToSourceObjectTable.
        st.print_cr("filler " SIZE_FORMAT " bytes", byte_size);
      } else {
        ShouldNotReachHere();
      }

      address oop_end = start + byte_size;
      log_as_hex(start, oop_end, requested_start, /*is_heap=*/true);

      if (source_oop != nullptr) {
        log_oop_details(heap_info, source_oop, /*buffered_addr=*/start);
      } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
        log_heap_roots();
      }
      start = oop_end;
    }
  }
1141
1142 // ArchivedFieldPrinter is used to print the fields of archived objects. We can't
1143 // use _source_obj->print_on(), because we want to print the oop fields
1144 // in _source_obj with their requested addresses using print_oop_with_requested_addr_cr().
1145 class ArchivedFieldPrinter : public FieldClosure {
1146 ArchiveHeapInfo* _heap_info;
1147 outputStream* _st;
1148 oop _source_obj;
1149 address _buffered_addr;
1150 public:
1151 ArchivedFieldPrinter(ArchiveHeapInfo* heap_info, outputStream* st, oop src_obj, address buffered_addr) :
1152 _heap_info(heap_info), _st(st), _source_obj(src_obj), _buffered_addr(buffered_addr) {}
1153
1154 void do_field(fieldDescriptor* fd) {
1155 _st->print(" - ");
1156 BasicType ft = fd->field_type();
1203 ArchiveBuilder* builder = ArchiveBuilder::current();
1204 Klass* requested_klass = builder->to_requested(builder->get_buffered_addr(source_klass));
1205
1206 st.print(" - klass: ");
1207 source_klass->print_value_on(&st);
1208 st.print(" " PTR_FORMAT, p2i(requested_klass));
1209 st.cr();
1210
1211 if (source_oop->is_typeArray()) {
1212 TypeArrayKlass::cast(source_klass)->oop_print_elements_on(typeArrayOop(source_oop), &st);
1213 } else if (source_oop->is_objArray()) {
1214 objArrayOop source_obj_array = objArrayOop(source_oop);
1215 for (int i = 0; i < source_obj_array->length(); i++) {
1216 st.print(" -%4d: ", i);
1217 print_oop_with_requested_addr_cr(&st, source_obj_array->obj_at(i));
1218 }
1219 } else {
1220 st.print_cr(" - fields (" SIZE_FORMAT " words):", source_oop->size());
1221 ArchivedFieldPrinter print_field(heap_info, &st, source_oop, buffered_addr);
1222 InstanceKlass::cast(source_klass)->print_nonstatic_fields(&print_field);
1223 }
1224 }
1225 }
1226
  // Log each archived heap root with its requested address (trace level only).
  static void log_heap_roots() {
    LogStreamHandle(Trace, cds, map, oops) st;
    if (st.is_enabled()) {
      for (int i = 0; i < HeapShared::pending_roots()->length(); i++) {
        st.print("roots[%4d]: ", i);
        print_oop_with_requested_addr_cr(&st, HeapShared::pending_roots()->at(i));
      }
    }
  }
1236
1237 // The output looks like this. The first number is the requested address. The second number is
1238 // the narrowOop version of the requested address.
1239 // 0x00000007ffc7e840 (0xfff8fd08) java.lang.Class
1240 // 0x00000007ffc000f8 (0xfff8001f) [B length: 11
  static void print_oop_with_requested_addr_cr(outputStream* st, oop source_oop, bool print_addr = true) {
    if (source_oop == nullptr) {
      st->print_cr("null");
    } else {
      ResourceMark rm;
      oop requested_obj = ArchiveHeapWriter::source_obj_to_requested_obj(source_oop);
      if (print_addr) {
        st->print(PTR_FORMAT " ", p2i(requested_obj));
      }
      if (UseCompressedOops) {
        st->print("(0x%08x) ", CompressedOops::narrow_oop_value(requested_obj));
      }
      if (source_oop->is_array()) {
        // For arrays, also show the element count.
        int array_len = arrayOop(source_oop)->length();
        st->print_cr("%s length: %d", source_oop->klass()->external_name(), array_len);
      } else {
        st->print_cr("%s", source_oop->klass()->external_name());
      }
    }
  }
1261 #endif // INCLUDE_CDS_JAVA_HEAP
1262
1263 // Log all the data [base...top). Pretend that the base address
1264 // will be mapped to requested_base at run-time.
1265 static void log_as_hex(address base, address top, address requested_base, bool is_heap = false) {
1266 assert(top >= base, "must be");
1267
1268 LogStreamHandle(Trace, cds, map) lsh;
1269 if (lsh.is_enabled()) {
1270 int unitsize = sizeof(address);
1271 if (is_heap && UseCompressedOops) {
1272 // This makes the compressed oop pointers easier to read, but
1273 // longs and doubles will be split into two words.
1274 unitsize = sizeof(narrowOop);
1275 }
1276 os::print_hex_dump(&lsh, base, top, unitsize, /* print_ascii=*/true, /* bytes_per_line=*/32, requested_base);
1277 }
1310 if (heap_info->is_used()) {
1311 log_heap_region(heap_info);
1312 }
1313 #endif
1314
1315 log_info(cds, map)("[End of CDS archive map]");
1316 }
1317 }; // end ArchiveBuilder::CDSMapLogger
1318
// Print the dump-time allocation statistics for the ro/rw regions.
void ArchiveBuilder::print_stats() {
  _alloc_stats.print_stats(int(_ro_region.used()), int(_rw_region.used()));
}
1322
1323 void ArchiveBuilder::write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info) {
1324 // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
1325 // MetaspaceShared::n_regions (internal to hotspot).
1326 assert(NUM_CDS_REGIONS == MetaspaceShared::n_regions, "sanity");
1327
1328 write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
1329 write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);
1330
1331 // Split pointer map into read-write and read-only bitmaps
1332 ArchivePtrMarker::initialize_rw_ro_maps(&_rw_ptrmap, &_ro_ptrmap);
1333
1334 size_t bitmap_size_in_bytes;
1335 char* bitmap = mapinfo->write_bitmap_region(ArchivePtrMarker::rw_ptrmap(), ArchivePtrMarker::ro_ptrmap(), heap_info,
1336 bitmap_size_in_bytes);
1337
1338 if (heap_info->is_used()) {
1339 _total_heap_region_size = mapinfo->write_heap_region(heap_info);
1340 }
1341
1342 print_region_stats(mapinfo, heap_info);
1343
1344 mapinfo->set_requested_base((char*)MetaspaceShared::requested_base_address());
1345 mapinfo->set_header_crc(mapinfo->compute_header_crc());
1346 // After this point, we should not write any data into mapinfo->header() since this
1347 // would corrupt its checksum we have calculated before.
1348 mapinfo->write_header();
1349 mapinfo->close();
1350
1351 if (log_is_enabled(Info, cds)) {
1352 print_stats();
1353 }
1354
1355 if (log_is_enabled(Info, cds, map)) {
1361 }
1362
// Write one core dump region (rw or ro) into the archive file.
void ArchiveBuilder::write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region, bool read_only, bool allow_exec) {
  mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
}
1366
// Print used/reserved statistics for every region in the archive (rw, ro,
// bitmap, and optionally heap), plus an overall total.
void ArchiveBuilder::print_region_stats(FileMapInfo *mapinfo, ArchiveHeapInfo* heap_info) {
  // Print statistics of all the regions
  const size_t bitmap_used = mapinfo->region_at(MetaspaceShared::bm)->used();
  const size_t bitmap_reserved = mapinfo->region_at(MetaspaceShared::bm)->used_aligned();
  const size_t total_reserved = _ro_region.reserved()  + _rw_region.reserved() +
                                bitmap_reserved +
                                _total_heap_region_size;
  const size_t total_bytes = _ro_region.used()  + _rw_region.used() +
                             bitmap_used +
                             _total_heap_region_size;
  const double total_u_perc = percent_of(total_bytes, total_reserved);

  _rw_region.print(total_reserved);
  _ro_region.print(total_reserved);

  print_bitmap_region_stats(bitmap_used, total_reserved);

  if (heap_info->is_used()) {
    print_heap_region_stats(heap_info, total_reserved);
  }

  log_debug(cds)("total    : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
                 total_bytes, total_reserved, total_u_perc);
}
1391
1392 void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
1393 log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used]",
1394 size, size/double(total_size)*100.0, size);
1395 }
1396
1397 void ArchiveBuilder::print_heap_region_stats(ArchiveHeapInfo *info, size_t total_size) {
1398 char* start = info->buffer_start();
1399 size_t size = info->buffer_byte_size();
1400 char* top = start + size;
|
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/archiveBuilder.hpp"
27 #include "cds/archiveHeapWriter.hpp"
28 #include "cds/archiveUtils.hpp"
29 #include "cds/cdsConfig.hpp"
30 #include "cds/classPrelinker.hpp"
31 #include "cds/classPreloader.hpp"
32 #include "cds/cppVtables.hpp"
33 #include "cds/dumpAllocStats.hpp"
34 #include "cds/dynamicArchive.hpp"
35 #include "cds/finalImageRecipes.hpp"
36 #include "cds/heapShared.hpp"
37 #include "cds/metaspaceShared.hpp"
38 #include "cds/regeneratedClasses.hpp"
39 #include "classfile/classLoader.hpp"
40 #include "classfile/classLoaderExt.hpp"
41 #include "classfile/classLoaderDataShared.hpp"
42 #include "classfile/javaClasses.hpp"
43 #include "classfile/symbolTable.hpp"
44 #include "classfile/systemDictionaryShared.hpp"
45 #include "classfile/vmClasses.hpp"
46 #include "interpreter/abstractInterpreter.hpp"
47 #include "jvm.h"
48 #include "logging/log.hpp"
49 #include "logging/logStream.hpp"
50 #include "memory/allStatic.hpp"
51 #include "memory/memRegion.hpp"
52 #include "memory/resourceArea.hpp"
53 #include "oops/compressedKlass.inline.hpp"
54 #include "oops/instanceKlass.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/objArrayOop.inline.hpp"
57 #include "oops/oopHandle.inline.hpp"
58 #include "oops/trainingData.hpp"
59 #include "runtime/arguments.hpp"
60 #include "runtime/fieldDescriptor.inline.hpp"
61 #include "runtime/globals_extension.hpp"
62 #include "runtime/javaThread.hpp"
63 #include "runtime/sharedRuntime.hpp"
64 #include "utilities/align.hpp"
65 #include "utilities/bitMap.inline.hpp"
66 #include "utilities/formatBuffer.hpp"
67
68 ArchiveBuilder* ArchiveBuilder::_current = nullptr;
69
// On destruction, record how many bytes were allocated into the RO region
// while this mark was alive (delta between the current RO top and the top
// captured at construction) into the dump-time allocation statistics.
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
74
// Starts with a 16K-bit pointer bitmap and a 128K-element object array; both
// grow on demand as source objects and their embedded pointers are recorded.
ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
  _total_bytes = 0;
  _objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
}
149
150 RelocateEmbeddedPointers relocator(builder, src_info->buffered_addr(), start);
151 _ptrmap.iterate(&relocator, start, end);
152 }
153
// All bookkeeping starts out empty. The dump regions are bound to their names
// and maximum relocation delta here, but no buffer memory is reserved yet
// (see reserve_buffer()).
ArchiveBuilder::ArchiveBuilder() :
  _current_dump_region(nullptr),
  _buffer_bottom(nullptr),
  _last_verified_top(nullptr),
  _num_dump_regions_used(0),
  _other_region_used_bytes(0),
  _requested_static_archive_bottom(nullptr),
  _requested_static_archive_top(nullptr),
  _requested_dynamic_archive_bottom(nullptr),
  _requested_dynamic_archive_top(nullptr),
  _mapped_static_archive_bottom(nullptr),
  _mapped_static_archive_top(nullptr),
  _buffer_to_requested_delta(0),
  _rw_region("rw", MAX_SHARED_DELTA),
  _ro_region("ro", MAX_SHARED_DELTA),
  _cc_region("cc", MAX_SHARED_DELTA),
  _ptrmap(mtClassShared),
  _rw_ptrmap(mtClassShared),
  _ro_ptrmap(mtClassShared),
  _cc_ptrmap(mtClassShared),
  _rw_src_objs(),
  _ro_src_objs(),
  _src_obj_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _buffered_to_src_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _total_heap_region_size(0),
  _estimated_metaspaceobj_bytes(0),
  _estimated_hashtable_bytes(0)
{
  _klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
  _symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
  // Arbitrary fixed seed -- keeps any entropy-derived values deterministic
  // from one dump to the next.
  _entropy_seed = 0x12345678;
  assert(_current == nullptr, "must be");
  _current = this;
}
188
189 ArchiveBuilder::~ArchiveBuilder() {
190 assert(_current == this, "must be");
191 _current = nullptr;
192
193 for (int i = 0; i < _symbols->length(); i++) {
217 public:
218 GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}
219
220 virtual bool do_unique_ref(Ref* ref, bool read_only) {
221 return _builder->gather_klass_and_symbol(ref, read_only);
222 }
223 };
224
225 bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
226 if (ref->obj() == nullptr) {
227 return false;
228 }
229 if (get_follow_mode(ref) != make_a_copy) {
230 return false;
231 }
232 if (ref->msotype() == MetaspaceObj::ClassType) {
233 Klass* klass = (Klass*)ref->obj();
234 assert(klass->is_klass(), "must be");
235 if (!is_excluded(klass)) {
236 _klasses->append(klass);
237 if (klass->is_hidden() && klass->is_instance_klass()) {
238 update_hidden_class_loader_type(InstanceKlass::cast(klass));
239 }
240 }
241 // See RunTimeClassInfo::get_for()
242 _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);
243 } else if (ref->msotype() == MetaspaceObj::SymbolType) {
244 // Make sure the symbol won't be GC'ed while we are dumping the archive.
245 Symbol* sym = (Symbol*)ref->obj();
246 sym->increment_refcount();
247 _symbols->append(sym);
248 }
249
250 int bytes = ref->size() * BytesPerWord;
251 _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);
252
253 return true; // recurse
254 }
255
256 void ArchiveBuilder::gather_klasses_and_symbols() {
257 ResourceMark rm;
258 log_info(cds)("Gathering classes and symbols ... ");
259 GatherKlassesAndSymbols doit(this);
278 // During -Xshare:dump, the order of Symbol creation is strictly determined by
279 // the SharedClassListFile (class loading is done in a single thread and the JIT
280 // is disabled). Also, Symbols are allocated in monotonically increasing addresses
281 // (see Symbol::operator new(size_t, int)). So if we iterate the Symbols by
282 // ascending address order, we ensure that all Symbols are copied into deterministic
283 // locations in the archive.
284 //
285 // TODO: in the future, if we want to produce deterministic contents in the
286 // dynamic archive, we might need to sort the symbols alphabetically (also see
287 // DynamicArchiveBuilder::sort_methods()).
288 log_info(cds)("Sorting symbols ... ");
289 _symbols->sort(compare_symbols_by_address);
290 sort_klasses();
291
292 // TODO -- we need a proper estimate for the archived modules, etc,
293 // but this should be enough for now
294 _estimated_metaspaceobj_bytes += 200 * 1024 * 1024;
295 }
296 }
297
298 #if INCLUDE_CDS_JAVA_HEAP
299
// Records, for an archived hidden class, which loader kind (boot/platform/app)
// it belongs to, plus a shared classpath index. Any hidden class reaching here
// must be either a lambda-form class or one that
// SystemDictionaryShared::should_hidden_class_be_archived() approves.
void ArchiveBuilder::update_hidden_class_loader_type(InstanceKlass* ik) {
  s2 classloader_type;
  if (HeapShared::is_lambda_form_klass(ik)) {
    assert(CDSConfig::is_dumping_invokedynamic(), "lambda form classes are archived only if CDSConfig::is_dumping_invokedynamic() is true");
    classloader_type = ClassLoader::BOOT_LOADER;
  } else if (SystemDictionaryShared::should_hidden_class_be_archived(ik)) {
    oop loader = ik->class_loader();

    if (loader == nullptr) {
      classloader_type = ClassLoader::BOOT_LOADER;
    } else if (SystemDictionary::is_platform_class_loader(loader)) {
      classloader_type = ClassLoader::PLATFORM_LOADER;
    } else if (SystemDictionary::is_system_class_loader(loader)) {
      classloader_type = ClassLoader::APP_LOADER;
    } else {
      // Hidden classes defined by custom loaders should have been excluded.
      ShouldNotReachHere();
    }
  } else {
    // Any other hidden class should have been excluded before this point.
    ShouldNotReachHere();
  }

  ik->set_shared_class_loader_type(classloader_type);
  if (HeapShared::is_lambda_proxy_klass(ik)) {
    // A lambda proxy is located via its nest host, so it inherits the host's
    // classpath index.
    InstanceKlass* nest_host = ik->nest_host_not_null();
    ik->set_shared_classpath_index(nest_host->shared_classpath_index());
  } else if (!HeapShared::is_lambda_form_klass(ik)) {
    // Injected invoker classes: fake this for now. Probably not needed!
    if (classloader_type == ClassLoader::APP_LOADER) {
      ik->set_shared_classpath_index(ClassLoaderExt::app_class_paths_start_index()); // HACK
    } else {
      ik->set_shared_classpath_index(0);
    }
  }
}
334
335 #endif //INCLUDE_CDS_JAVA_HEAP
336
337 int ArchiveBuilder::compare_symbols_by_address(Symbol** a, Symbol** b) {
338 if (a[0] < b[0]) {
339 return -1;
340 } else {
341 assert(a[0] > b[0], "Duplicated symbol %s unexpected", (*a)->as_C_string());
342 return 1;
343 }
344 }
345
// Comparator for sorting _klasses alphabetically by class name, giving a
// deterministic order independent of class-load order.
int ArchiveBuilder::compare_klass_by_name(Klass** a, Klass** b) {
  return a[0]->name()->fast_compare(b[0]->name());
}
349
// Sorts the gathered classes by name so they are archived in a deterministic
// order.
void ArchiveBuilder::sort_klasses() {
  log_info(cds)("Sorting classes ... ");
  _klasses->sort(compare_klass_by_name);
}
354
355 size_t ArchiveBuilder::estimate_archive_size() {
356 // size of the symbol table and two dictionaries, plus the RunTimeClassInfo's
357 size_t symbol_table_est = SymbolTable::estimate_size_for_archive();
358 size_t dictionary_est = SystemDictionaryShared::estimate_size_for_archive();
359 size_t training_data_est = TrainingData::estimate_size_for_archive();
360 _estimated_hashtable_bytes = symbol_table_est + dictionary_est + training_data_est;
361
362 if (CDSConfig::is_dumping_final_static_archive()) {
363 _estimated_hashtable_bytes += 200 * 1024 * 1024; // FIXME -- need to iterate archived symbols??
364 }
365
366 if (CDSConfig::is_dumping_dynamic_archive()) {
367 // Some extra space for traning data. Be generous. Unused areas will be trimmed from the archive file.
368 _estimated_hashtable_bytes += 200 * 1024 * 1024;
369 }
370 size_t total = 0;
371
372 total += _estimated_metaspaceobj_bytes;
373 total += _estimated_hashtable_bytes;
374
375 // allow fragmentation at the end of each dump region
376 total += _total_dump_regions * MetaspaceShared::core_region_alignment();
377
378 log_info(cds)("_estimated_hashtable_bytes = " SIZE_FORMAT " + " SIZE_FORMAT " = " SIZE_FORMAT,
379 symbol_table_est, dictionary_est, _estimated_hashtable_bytes);
380 log_info(cds)("_estimated_metaspaceobj_bytes = " SIZE_FORMAT, _estimated_metaspaceobj_bytes);
381 log_info(cds)("total estimate bytes = " SIZE_FORMAT, total);
382
383 return align_up(total, MetaspaceShared::core_region_alignment());
384 }
385
386 address ArchiveBuilder::reserve_buffer() {
387 size_t buffer_size = estimate_archive_size();
388 ReservedSpace rs(buffer_size, MetaspaceShared::core_region_alignment(), os::vm_page_size());
389 if (!rs.is_reserved()) {
463
464 iterate_roots(it);
465 }
466
// MetaspaceClosure that forwards every visited reference to
// ArchiveBuilder::gather_one_source_obj(), which records the object for
// copying and remembers its embedded pointers.
class GatherSortedSourceObjs : public MetaspaceClosure {
  ArchiveBuilder* _builder;

public:
  GatherSortedSourceObjs(ArchiveBuilder* builder) : _builder(builder) {}

  virtual bool do_ref(Ref* ref, bool read_only) {
    return _builder->gather_one_source_obj(ref, read_only);
  }
};
477
478 bool ArchiveBuilder::gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only) {
479 address src_obj = ref->obj();
480 if (src_obj == nullptr) {
481 return false;
482 }
483
484 remember_embedded_pointer_in_enclosing_obj(ref);
485 if (RegeneratedClasses::has_been_regenerated(src_obj)) {
486 // No need to copy it. We will later relocate it to point to the regenerated klass/method.
487 return false;
488 }
489
490 FollowMode follow_mode = get_follow_mode(ref);
491 SourceObjInfo src_info(ref, read_only, follow_mode);
492 bool created;
493 SourceObjInfo* p = _src_obj_table.put_if_absent(src_obj, src_info, &created);
494 if (created) {
495 if (_src_obj_table.maybe_grow()) {
496 log_info(cds, hashtables)("Expanded _src_obj_table table to %d", _src_obj_table.table_size());
497 }
498 }
499
500 #ifdef ASSERT
501 if (ref->msotype() == MetaspaceObj::MethodType) {
502 Method* m = (Method*)ref->obj();
503 assert(!RegeneratedClasses::has_been_regenerated((address)m->method_holder()),
504 "Should not archive methods in a class that has been regenerated");
505 }
506 #endif
507
508 assert(p->read_only() == src_info.read_only(), "must be");
568 _rw_src_objs.remember_embedded_pointer(src_info, ref);
569 }
570 }
571 }
572
// Gathering phase: first collects all classes and symbols, then walks the
// sorted roots to record every archivable MetaspaceObj and its embedded
// pointers.
void ArchiveBuilder::gather_source_objs() {
  ResourceMark rm;
  log_info(cds)("Gathering all archivable objects ... ");
  gather_klasses_and_symbols();
  GatherSortedSourceObjs doit(this);
  iterate_sorted_roots(&doit);
  doit.finish();
}
581
582 bool ArchiveBuilder::is_excluded(Klass* klass) {
583 if (klass->is_instance_klass()) {
584 InstanceKlass* ik = InstanceKlass::cast(klass);
585 return SystemDictionaryShared::is_excluded_class(ik);
586 } else if (klass->is_objArray_klass()) {
587 Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
588 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_shared_static(bottom)) {
589 // The bottom class is in the static archive so it's clearly not excluded.
590 assert(CDSConfig::is_dumping_dynamic_archive(), "sanity");
591 return false;
592 } else if (bottom->is_instance_klass()) {
593 return SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(bottom));
594 }
595 }
596
597 return false;
598 }
599
600 ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
601 address obj = ref->obj();
602 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_in_shared_metaspace(obj)) {
603 // Don't dump existing shared metadata again.
604 return point_to_it;
605 } else if (ref->msotype() == MetaspaceObj::MethodDataType ||
606 ref->msotype() == MetaspaceObj::MethodCountersType ||
607 ref->msotype() == MetaspaceObj::KlassTrainingDataType ||
608 ref->msotype() == MetaspaceObj::MethodTrainingDataType ||
609 ref->msotype() == MetaspaceObj::CompileTrainingDataType) {
610 return TrainingData::need_data() ? make_a_copy : set_to_null;
611 } else {
612 if (ref->msotype() == MetaspaceObj::ClassType) {
613 Klass* klass = (Klass*)ref->obj();
614 assert(klass->is_klass(), "must be");
615 if (is_excluded(klass)) {
616 ResourceMark rm;
617 log_debug(cds, dynamic)("Skipping class (excluded): %s", klass->external_name());
618 return set_to_null;
619 }
620 }
621
622 return make_a_copy;
623 }
624 }
625
626 void ArchiveBuilder::start_dump_region(DumpRegion* next) {
627 address bottom = _last_verified_top;
628 address top = (address)(current_dump_region()->top());
629 _other_region_used_bytes += size_t(top - bottom);
630
779 ArchivePtrMarker::mark_pointer(ptr_location);
780 }
781 }
782
783 void ArchiveBuilder::mark_and_relocate_to_buffered_addr(address* ptr_location) {
784 assert(*ptr_location != nullptr, "sanity");
785 if (!is_in_mapped_static_archive(*ptr_location)) {
786 *ptr_location = get_buffered_addr(*ptr_location);
787 }
788 ArchivePtrMarker::mark_pointer(ptr_location);
789 }
790
791 address ArchiveBuilder::get_buffered_addr(address src_addr) const {
792 SourceObjInfo* p = _src_obj_table.get(src_addr);
793 assert(p != nullptr, "src_addr " INTPTR_FORMAT " is used but has not been archived",
794 p2i(src_addr));
795
796 return p->buffered_addr();
797 }
798
799 bool ArchiveBuilder::has_been_archived(address src_addr) const {
800 SourceObjInfo* p = _src_obj_table.get(src_addr);
801 return (p != nullptr);
802 }
803
804 address ArchiveBuilder::get_source_addr(address buffered_addr) const {
805 assert(is_in_buffer_space(buffered_addr), "must be");
806 address* src_p = _buffered_to_src_table.get(buffered_addr);
807 assert(src_p != nullptr && *src_p != nullptr, "must be");
808 return *src_p;
809 }
810
// Relocates the embedded pointers of every gathered object in src_objs so they
// point to the buffered copies of their targets.
void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
  for (int i = 0; i < src_objs->objs()->length(); i++) {
    src_objs->relocate(i, this);
  }
}
816
// Relocates embedded pointers in all objects copied into the rw and ro
// regions.
void ArchiveBuilder::relocate_metaspaceobj_embedded_pointers() {
  log_info(cds)("Relocating embedded pointers in core regions ... ");
  relocate_embedded_pointers(&_rw_src_objs);
  relocate_embedded_pointers(&_ro_src_objs);
}
822
// Bumps counter <x>, plus its "_p" (preloaded) and "_i" (pre-initialized)
// sub-counters. Relies on int locals named 'preloaded' and 'inited' (each
// 0 or 1) being in scope at the use site.
#define ADD_COUNT(x) \
  x += 1; \
  x ## _p += preloaded; \
  x ## _i += inited;

// Declares the counter trio consumed by ADD_COUNT: x, x_p and x_i.
#define DECLARE_INSTANCE_KLASS_COUNTER(x) \
  int x = 0; \
  int x ## _p = 0; \
  int x ## _i = 0; \

// Strips per-process ("unshareable") state from every buffered class so it can
// be stored in the archive, and logs per-category statistics (boot/platform/
// app/unregistered, hidden, unlinked, preloaded, pre-initialized).
void ArchiveBuilder::make_klasses_shareable() {
  DECLARE_INSTANCE_KLASS_COUNTER(num_instance_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_boot_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_vm_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_platform_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_app_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_hidden_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_unlinked_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_unregistered_klasses);
  int num_obj_array_klasses = 0;
  int num_type_array_klasses = 0;

  int boot_unlinked = 0;
  int platform_unlinked = 0;
  int app_unlinked = 0;
  int unreg_unlinked = 0;

  for (int i = 0; i < klasses()->length(); i++) {
    // Some of the code in ConstantPool::remove_unshareable_info() requires the classes
    // to be in linked state, so it must be called here before the next loop, which returns
    // all classes to unlinked state.
    Klass* k = get_buffered_addr(klasses()->at(i));
    if (k->is_instance_klass()) {
      InstanceKlass::cast(k)->constants()->remove_unshareable_info();
    }
  }

  // Second pass: categorize each class, strip its unshareable state, and log it.
  for (int i = 0; i < klasses()->length(); i++) {
    const char* type;
    const char* unlinked = "";
    const char* hidden = "";
    const char* generated = "";
    const char* preloaded_msg = "";
    const char* inited_msg = "";
    Klass* k = get_buffered_addr(klasses()->at(i));
    k->remove_java_mirror();
    if (k->is_objArray_klass()) {
      // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
      // on their array classes.
      num_obj_array_klasses ++;
      type = "array";
    } else if (k->is_typeArray_klass()) {
      num_type_array_klasses ++;
      type = "array";
      k->remove_unshareable_info();
    } else {
      assert(k->is_instance_klass(), " must be");
      InstanceKlass* ik = InstanceKlass::cast(k);
      // Preloading is recorded against the source class, not the buffered copy.
      InstanceKlass* src_ik = get_source_addr(ik);
      int preloaded = ClassPreloader::is_preloaded_class(src_ik);
      int inited = ik->has_preinitialized_mirror();
      ADD_COUNT(num_instance_klasses);
      if (CDSConfig::is_dumping_dynamic_archive()) {
        // For static dump, class loader type are already set.
        ik->assign_class_loader_type();
      }
      if (ik->is_hidden()) {
        // Hidden classes are categorized by their actual defining loader.
        oop loader = k->class_loader();
        if (loader == nullptr) {
          type = "boot";
          ADD_COUNT(num_boot_klasses);
        } else if (loader == SystemDictionary::java_platform_loader()) {
          type = "plat";
          ADD_COUNT(num_platform_klasses);
        } else if (loader == SystemDictionary::java_system_loader()) {
          type = "app";
          ADD_COUNT(num_app_klasses);
        } else {
          type = "bad";
          assert(0, "shouldn't happen");
        }
      } else if (ik->is_shared_boot_class()) {
        type = "boot";
        ADD_COUNT(num_boot_klasses);
      } else if (ik->is_shared_platform_class()) {
        type = "plat";
        ADD_COUNT(num_platform_klasses);
      } else if (ik->is_shared_app_class()) {
        type = "app";
        ADD_COUNT(num_app_klasses);
      } else {
        assert(ik->is_shared_unregistered_class(), "must be");
        type = "unreg";
        ADD_COUNT(num_unregistered_klasses);
      }

      if (ClassPreloader::is_vm_class(src_ik)) {
        ADD_COUNT(num_vm_klasses);
      }

      if (!ik->is_linked()) {
        ADD_COUNT(num_unlinked_klasses);
        unlinked = " unlinked";
        if (ik->is_shared_boot_class()) {
          boot_unlinked ++;
        } else if (ik->is_shared_platform_class()) {
          platform_unlinked ++;
        } else if (ik->is_shared_app_class()) {
          app_unlinked ++;
        } else {
          unreg_unlinked ++;
        }
      }

      if (ik->is_hidden()) {
        ADD_COUNT(num_hidden_klasses);
        hidden = " hidden";
      }

      if (ik->is_generated_shared_class()) {
        generated = " generated";
      }
      if (preloaded) {
        preloaded_msg = " preloaded";
      }
      if (inited) {
        inited_msg = " inited";
      }

      MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread::current(), ik);
      ik->remove_unshareable_info();
    }

    if (log_is_enabled(Debug, cds, class)) {
      ResourceMark rm;
      log_debug(cds, class)("klasses[%5d] = " PTR_FORMAT " %-5s %s%s%s%s%s%s", i,
                            p2i(to_requested(k)), type, k->external_name(),
                            hidden, unlinked, generated, preloaded_msg, inited_msg);
    }
  }

#define STATS_FORMAT    "= %5d, preloaded = %5d, inited = %5d"
#define STATS_PARAMS(x) num_ ## x, num_ ## x ## _p, num_ ## x ## _i

  log_info(cds)("Number of classes %d", num_instance_klasses + num_obj_array_klasses + num_type_array_klasses);
  log_info(cds)("    instance classes   " STATS_FORMAT, STATS_PARAMS(instance_klasses));
  log_info(cds)("      boot             " STATS_FORMAT, STATS_PARAMS(boot_klasses));
  log_info(cds)("        vm             " STATS_FORMAT, STATS_PARAMS(vm_klasses));
  log_info(cds)("      platform         " STATS_FORMAT, STATS_PARAMS(platform_klasses));
  log_info(cds)("      app              " STATS_FORMAT, STATS_PARAMS(app_klasses));
  log_info(cds)("      unregistered     " STATS_FORMAT, STATS_PARAMS(unregistered_klasses));
  log_info(cds)("        (hidden)       " STATS_FORMAT, STATS_PARAMS(hidden_klasses));
  log_info(cds)("        (unlinked)     " STATS_FORMAT ", boot = %d, plat = %d, app = %d, unreg = %d",
                STATS_PARAMS(unlinked_klasses),
                boot_unlinked, platform_unlinked,
                app_unlinked, unreg_unlinked);
  log_info(cds)("    obj array classes  = %5d", num_obj_array_klasses);
  log_info(cds)("    type array classes = %5d", num_type_array_klasses);
  log_info(cds)("               symbols = %5d", _symbols->length());

#undef STATS_FORMAT
#undef STATS_PARAMS

  DynamicArchive::make_array_klasses_shareable();
}
988
// Strips run-time-only state from the buffered copies of all training-related
// objects (TrainingData subclasses, MethodData, MethodCounters) so they can be
// stored in the archive.
void ArchiveBuilder::make_training_data_shareable() {
  auto clean_td = [&] (address& src_obj, SourceObjInfo& info) {
    if (!is_in_buffer_space(info.buffered_addr())) {
      // Objects that were not copied into the buffer need no cleaning.
      return;
    }

    if (info.msotype() == MetaspaceObj::KlassTrainingDataType ||
        info.msotype() == MetaspaceObj::MethodTrainingDataType ||
        info.msotype() == MetaspaceObj::CompileTrainingDataType) {
      TrainingData* buffered_td = (TrainingData*)info.buffered_addr();
      buffered_td->remove_unshareable_info();
    } else if (info.msotype() == MetaspaceObj::MethodDataType) {
      MethodData* buffered_mdo = (MethodData*)info.buffered_addr();
      buffered_mdo->remove_unshareable_info();
    } else if (info.msotype() == MetaspaceObj::MethodCountersType) {
      MethodCounters* buffered_mc = (MethodCounters*)info.buffered_addr();
      buffered_mc->remove_unshareable_info();
    }
  };
  _src_obj_table.iterate_all(clean_td);
}
1010
// Passes the table headers and other archived items through <soc>; the same
// routine is used for both writing them at dump time and reading them back at
// run time, depending on the closure's direction.
void ArchiveBuilder::serialize_dynamic_archivable_items(SerializeClosure* soc) {
  SymbolTable::serialize_shared_table_header(soc, false);
  SystemDictionaryShared::serialize_dictionary_headers(soc, false);
  DynamicArchive::serialize_array_klasses(soc);
  ClassPreloader::serialize(soc, false);
  FinalImageRecipes::serialize(soc, false);
  TrainingData::serialize_training_data(soc);
}
1019
1020 uintx ArchiveBuilder::buffer_to_offset(address p) const {
1021 address requested_p = to_requested(p);
1022 assert(requested_p >= _requested_static_archive_bottom, "must be");
1023 return requested_p - _requested_static_archive_bottom;
1024 }
1025
1026 uintx ArchiveBuilder::any_to_offset(address p) const {
1027 if (is_in_mapped_static_archive(p)) {
1028 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
1029 return p - _mapped_static_archive_bottom;
1030 }
1031 if (!is_in_buffer_space(p)) {
1032 // p must be a "source" address
1033 p = get_buffered_addr(p);
1034 }
1035 return buffer_to_offset(p);
1036 }
1037
// Opens the "cc" region for allocation; the ro region is packed (finalized)
// first since regions are filled in sequence.
void ArchiveBuilder::start_cc_region() {
  ro_region()->pack();
  start_dump_region(&_cc_region);
}
1042
// Finalizes the "cc" region; no further allocations are made into it.
void ArchiveBuilder::end_cc_region() {
  _cc_region.pack();
}
1046
1047 #if INCLUDE_CDS_JAVA_HEAP
1048 narrowKlass ArchiveBuilder::get_requested_narrow_klass(Klass* k) {
1049 assert(CDSConfig::is_dumping_heap(), "sanity");
1050 k = get_buffered_klass(k);
1051 Klass* requested_k = to_requested(k);
1052 address narrow_klass_base = _requested_static_archive_bottom; // runtime encoding base == runtime mapping start
1053 const int narrow_klass_shift = ArchiveHeapWriter::precomputed_narrow_klass_shift;
1054 return CompressedKlassPointers::encode_not_null(requested_k, narrow_klass_base, narrow_klass_shift);
1055 }
1056 #endif // INCLUDE_CDS_JAVA_HEAP
1057
1058 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
1059 // so that the archive can be mapped to the "requested" location without runtime relocation.
1060 //
1061 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
1062 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
1063 // - Every pointer must have one of the following values:
1064 // [a] nullptr:
1065 // No relocation is needed. Remove this pointer from ptrmap so we don't need to
1066 // consider it at runtime.
1116 } else {
1117 assert(_builder->is_in_mapped_static_archive(*p), "old pointer must point inside buffer space or mapped static archive");
1118 *p += _mapped_to_requested_static_archive_delta;
1119 assert(_builder->is_in_requested_static_archive(*p), "new pointer must point inside requested archive");
1120 }
1121 }
1122 _max_non_null_offset = offset;
1123 }
1124
1125 return true; // keep iterating
1126 }
1127
1128 void doit() {
1129 ArchivePtrMarker::ptrmap()->iterate(this);
1130 ArchivePtrMarker::compact(_max_non_null_offset);
1131 }
1132 };
1133
1134
// Relocates all marked pointers in the buffer so the archive can be mapped at
// its "requested" address without run-time relocation. See the comment block
// above RelocateBufferToRequested for the pointer classification.
void ArchiveBuilder::relocate_to_requested() {
  if (!ro_region()->is_packed()) {
    ro_region()->pack();
  }

  size_t my_archive_size = buffer_top() - buffer_bottom();

  if (CDSConfig::is_dumping_static_archive()) {
    _requested_static_archive_top = _requested_static_archive_bottom + my_archive_size;
    RelocateBufferToRequested<true> patcher(this);
    patcher.doit();
  } else {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    _requested_dynamic_archive_top = _requested_dynamic_archive_bottom + my_archive_size;
    RelocateBufferToRequested<false> patcher(this);
    patcher.doit();
  }
}
1153
1154 // Write detailed info to a mapfile to analyze contents of the archive.
1155 // static dump:
1156 // java -Xshare:dump -Xlog:cds+map=trace:file=cds.map:none:filesize=0
1157 // dynamic dump:
1158 // java -cp MyApp.jar -XX:ArchiveClassesAtExit=MyApp.jsa \
1266 base = requested_base;
1267 top = requested_base + size;
1268 log_info(cds, map)("[%-18s " PTR_FORMAT " - " PTR_FORMAT " " SIZE_FORMAT_W(9) " bytes]",
1269 name, p2i(base), p2i(top), size);
1270 }
1271
1272 #if INCLUDE_CDS_JAVA_HEAP
  // Logs the contents of the archived heap region: one header line per entry
  // (a regular archived oop, the special HeapShared::roots() segment, a filler,
  // or a permobj segment), followed by a hex dump and finer-grained details.
  static void log_heap_region(ArchiveHeapInfo* heap_info) {
    MemRegion r = heap_info->buffer_region();
    address start = address(r.start()); // start of the current oop inside the buffer
    address end = address(r.end());
    log_region("heap", start, end, ArchiveHeapWriter::buffered_addr_to_requested_addr(start));

    LogStreamHandle(Info, cds, map) st;

    while (start < end) {
      size_t byte_size;
      oop source_oop = ArchiveHeapWriter::buffered_addr_to_source_obj(start);
      address requested_start = ArchiveHeapWriter::buffered_addr_to_requested_addr(start);
      st.print(PTR_FORMAT ": @@ Object ", p2i(requested_start));
      int permobj_segment = -1;
      int permobj_segment_length = -1;

      if (source_oop != nullptr) {
        // This is a regular oop that got archived.
        print_oop_with_requested_addr_cr(&st, source_oop, false);
        byte_size = source_oop->size() * BytesPerWord;
      } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
        // HeapShared::roots() is copied specially, so it doesn't exist in
        // ArchiveHeapWriter::BufferOffsetToSourceObjectTable.
        // See ArchiveHeapWriter::copy_roots_to_buffer().
        st.print_cr("HeapShared::roots[%d]", HeapShared::pending_roots()->length());
        byte_size = ArchiveHeapWriter::heap_roots_word_size() * BytesPerWord;
      } else if ((byte_size = ArchiveHeapWriter::get_filler_size_at(start)) > 0) {
        // We have a filler oop, which also does not exist in BufferOffsetToSourceObjectTable.
        st.print_cr("filler " SIZE_FORMAT " bytes", byte_size);
      } else if ((permobj_segment = ArchiveHeapWriter::get_permobj_segment_at(start, &byte_size, &permobj_segment_length)) >= 0) {
        st.print_cr("permobj_%d[%d] %zu bytes", permobj_segment, permobj_segment_length, byte_size);
      } else {
        ShouldNotReachHere();
      }

      address oop_end = start + byte_size;
      log_as_hex(start, oop_end, requested_start, /*is_heap=*/true);

      // Detail logging (fields/elements, roots, or permobj elements) at trace level.
      if (source_oop != nullptr) {
        log_oop_details(heap_info, source_oop, /*buffered_addr=*/start);
      } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
        log_heap_roots();
      } else if (permobj_segment >= 0) {
        log_permobj_segment(permobj_segment, permobj_segment_length);
      }
      start = oop_end;
    }
  }
1321
1322 // ArchivedFieldPrinter is used to print the fields of archived objects. We can't
1323 // use _source_obj->print_on(), because we want to print the oop fields
1324 // in _source_obj with their requested addresses using print_oop_with_requested_addr_cr().
1325 class ArchivedFieldPrinter : public FieldClosure {
1326 ArchiveHeapInfo* _heap_info;
1327 outputStream* _st;
1328 oop _source_obj;
1329 address _buffered_addr;
1330 public:
1331 ArchivedFieldPrinter(ArchiveHeapInfo* heap_info, outputStream* st, oop src_obj, address buffered_addr) :
1332 _heap_info(heap_info), _st(st), _source_obj(src_obj), _buffered_addr(buffered_addr) {}
1333
1334 void do_field(fieldDescriptor* fd) {
1335 _st->print(" - ");
1336 BasicType ft = fd->field_type();
1383 ArchiveBuilder* builder = ArchiveBuilder::current();
1384 Klass* requested_klass = builder->to_requested(builder->get_buffered_addr(source_klass));
1385
1386 st.print(" - klass: ");
1387 source_klass->print_value_on(&st);
1388 st.print(" " PTR_FORMAT, p2i(requested_klass));
1389 st.cr();
1390
1391 if (source_oop->is_typeArray()) {
1392 TypeArrayKlass::cast(source_klass)->oop_print_elements_on(typeArrayOop(source_oop), &st);
1393 } else if (source_oop->is_objArray()) {
1394 objArrayOop source_obj_array = objArrayOop(source_oop);
1395 for (int i = 0; i < source_obj_array->length(); i++) {
1396 st.print(" -%4d: ", i);
1397 print_oop_with_requested_addr_cr(&st, source_obj_array->obj_at(i));
1398 }
1399 } else {
1400 st.print_cr(" - fields (" SIZE_FORMAT " words):", source_oop->size());
1401 ArchivedFieldPrinter print_field(heap_info, &st, source_oop, buffered_addr);
1402 InstanceKlass::cast(source_klass)->print_nonstatic_fields(&print_field);
1403
1404 if (java_lang_Class::is_instance(source_oop)) {
1405 st.print(" - signature: ");
1406 if (java_lang_Class::is_primitive(source_oop)) {
1407 st.print("primitive ??");
1408 } else {
1409 java_lang_Class::print_signature(source_oop, &st);
1410 }
1411 st.cr();
1412 }
1413 }
1414 }
1415 }
1416
1417 static void log_heap_roots() {
1418 LogStreamHandle(Trace, cds, map, oops) st;
1419 if (st.is_enabled()) {
1420 for (int i = 0; i < HeapShared::pending_roots()->length(); i++) {
1421 st.print("roots[%4d]: ", i);
1422 print_oop_with_requested_addr_cr(&st, HeapShared::pending_roots()->at(i));
1423 }
1424 }
1425 }
1426
1427 static void log_permobj_segment(int permobj_segment, int permobj_segment_length) {
1428 LogStreamHandle(Trace, cds, map, oops) st;
1429 if (st.is_enabled()) {
1430 for (int i = 0; i < permobj_segment_length; i++) {
1431 st.print("permobj_%d[%4d]: ", permobj_segment, i);
1432 print_oop_with_requested_addr_cr(&st, ArchiveHeapWriter::get_permobj_source_addr(permobj_segment, i));
1433 }
1434 }
1435 }
1436
1437
  // Prints one line describing source_oop at its requested (runtime) address.
  // The output looks like this. The first number is the requested address. The second number is
  // the narrowOop version of the requested address.
  // 0x00000007ffc7e840 (0xfff8fd08) java.lang.Class
  // 0x00000007ffc000f8 (0xfff8001f) [B length: 11
  static void print_oop_with_requested_addr_cr(outputStream* st, oop source_oop, bool print_addr = true) {
    if (source_oop == nullptr) {
      st->print_cr("null");
    } else {
      ResourceMark rm;
      oop requested_obj = ArchiveHeapWriter::source_obj_to_requested_obj(source_oop);
      if (print_addr) {
        st->print(PTR_FORMAT " ", p2i(requested_obj));
      }
      if (UseCompressedOops) {
        st->print("(0x%08x) ", CompressedOops::narrow_oop_value(requested_obj));
      }
      if (source_oop->is_array()) {
        int array_len = arrayOop(source_oop)->length();
        st->print_cr("%s length: %d", source_oop->klass()->external_name(), array_len);
      } else {
        st->print("%s", source_oop->klass()->external_name());
        if (java_lang_invoke_MethodType::is_instance(source_oop)) {
          // MethodTypes are much easier to identify by their signature.
          st->print(" ");
          java_lang_invoke_MethodType::print_signature(source_oop, st);
        }
        st->cr();
      }
    }
  }
1467 #endif // INCLUDE_CDS_JAVA_HEAP
1468
1469 // Log all the data [base...top). Pretend that the base address
1470 // will be mapped to requested_base at run-time.
1471 static void log_as_hex(address base, address top, address requested_base, bool is_heap = false) {
1472 assert(top >= base, "must be");
1473
1474 LogStreamHandle(Trace, cds, map) lsh;
1475 if (lsh.is_enabled()) {
1476 int unitsize = sizeof(address);
1477 if (is_heap && UseCompressedOops) {
1478 // This makes the compressed oop pointers easier to read, but
1479 // longs and doubles will be split into two words.
1480 unitsize = sizeof(narrowOop);
1481 }
1482 os::print_hex_dump(&lsh, base, top, unitsize, /* print_ascii=*/true, /* bytes_per_line=*/32, requested_base);
1483 }
1516 if (heap_info->is_used()) {
1517 log_heap_region(heap_info);
1518 }
1519 #endif
1520
1521 log_info(cds, map)("[End of CDS archive map]");
1522 }
1523 }; // end ArchiveBuilder::CDSMapLogger
1524
// Prints the per-type allocation statistics gathered while copying objects
// into the ro and rw regions.
void ArchiveBuilder::print_stats() {
  _alloc_stats.print_stats(int(_ro_region.used()), int(_rw_region.used()));
}
1528
1529 void ArchiveBuilder::write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info) {
1530 // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
1531 // MetaspaceShared::n_regions (internal to hotspot).
1532 assert(NUM_CDS_REGIONS == MetaspaceShared::n_regions, "sanity");
1533
1534 write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
1535 write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);
1536 write_region(mapinfo, MetaspaceShared::cc, &_cc_region, /*read_only=*/false,/*allow_exec=*/true);
1537
1538 // Split pointer map into read-write and read-only bitmaps
1539 ArchivePtrMarker::initialize_rw_ro_cc_maps(&_rw_ptrmap, &_ro_ptrmap, &_cc_ptrmap);
1540
1541 size_t bitmap_size_in_bytes;
1542 char* bitmap = mapinfo->write_bitmap_region(ArchivePtrMarker::rw_ptrmap(),
1543 ArchivePtrMarker::ro_ptrmap(),
1544 ArchivePtrMarker::cc_ptrmap(),
1545 heap_info,
1546 bitmap_size_in_bytes);
1547
1548 if (heap_info->is_used()) {
1549 _total_heap_region_size = mapinfo->write_heap_region(heap_info);
1550 }
1551
1552 print_region_stats(mapinfo, heap_info);
1553
1554 mapinfo->set_requested_base((char*)MetaspaceShared::requested_base_address());
1555 mapinfo->set_header_crc(mapinfo->compute_header_crc());
1556 // After this point, we should not write any data into mapinfo->header() since this
1557 // would corrupt its checksum we have calculated before.
1558 mapinfo->write_header();
1559 mapinfo->close();
1560
1561 if (log_is_enabled(Info, cds)) {
1562 print_stats();
1563 }
1564
1565 if (log_is_enabled(Info, cds, map)) {
1571 }
1572
1573 void ArchiveBuilder::write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region, bool read_only, bool allow_exec) {
1574 mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
1575 }
1576
1577 void ArchiveBuilder::print_region_stats(FileMapInfo *mapinfo, ArchiveHeapInfo* heap_info) {
1578 // Print statistics of all the regions
1579 const size_t bitmap_used = mapinfo->region_at(MetaspaceShared::bm)->used();
1580 const size_t bitmap_reserved = mapinfo->region_at(MetaspaceShared::bm)->used_aligned();
1581 const size_t total_reserved = _ro_region.reserved() + _rw_region.reserved() +
1582 bitmap_reserved +
1583 _total_heap_region_size;
1584 const size_t total_bytes = _ro_region.used() + _rw_region.used() +
1585 bitmap_used +
1586 _total_heap_region_size;
1587 const double total_u_perc = percent_of(total_bytes, total_reserved);
1588
1589 _rw_region.print(total_reserved);
1590 _ro_region.print(total_reserved);
1591 _cc_region.print(total_reserved);
1592
1593 print_bitmap_region_stats(bitmap_used, total_reserved);
1594
1595 if (heap_info->is_used()) {
1596 print_heap_region_stats(heap_info, total_reserved);
1597 }
1598
1599 log_debug(cds)("total : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
1600 total_bytes, total_reserved, total_u_perc);
1601 }
1602
1603 void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
1604 log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used]",
1605 size, size/double(total_size)*100.0, size);
1606 }
1607
1608 void ArchiveBuilder::print_heap_region_stats(ArchiveHeapInfo *info, size_t total_size) {
1609 char* start = info->buffer_start();
1610 size_t size = info->buffer_byte_size();
1611 char* top = start + size;
|