10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/archiveBuilder.hpp"
27 #include "cds/archiveHeapWriter.hpp"
28 #include "cds/archiveUtils.hpp"
29 #include "cds/cdsConfig.hpp"
30 #include "cds/cppVtables.hpp"
31 #include "cds/dumpAllocStats.hpp"
32 #include "cds/dynamicArchive.hpp"
33 #include "cds/heapShared.hpp"
34 #include "cds/metaspaceShared.hpp"
35 #include "cds/regeneratedClasses.hpp"
36 #include "classfile/classLoaderDataShared.hpp"
37 #include "classfile/javaClasses.hpp"
38 #include "classfile/symbolTable.hpp"
39 #include "classfile/systemDictionaryShared.hpp"
40 #include "classfile/vmClasses.hpp"
41 #include "interpreter/abstractInterpreter.hpp"
42 #include "jvm.h"
43 #include "logging/log.hpp"
44 #include "logging/logStream.hpp"
45 #include "memory/allStatic.hpp"
46 #include "memory/memRegion.hpp"
47 #include "memory/resourceArea.hpp"
48 #include "oops/compressedKlass.inline.hpp"
49 #include "oops/instanceKlass.hpp"
50 #include "oops/objArrayKlass.hpp"
51 #include "oops/objArrayOop.inline.hpp"
52 #include "oops/oopHandle.inline.hpp"
53 #include "runtime/arguments.hpp"
54 #include "runtime/fieldDescriptor.inline.hpp"
55 #include "runtime/globals_extension.hpp"
56 #include "runtime/javaThread.hpp"
57 #include "runtime/sharedRuntime.hpp"
58 #include "utilities/align.hpp"
59 #include "utilities/bitMap.inline.hpp"
60 #include "utilities/formatBuffer.hpp"
61
// Singleton: at most one ArchiveBuilder is active at a time (enforced by
// asserts in the constructor/destructor).
ArchiveBuilder* ArchiveBuilder::_current = nullptr;
63
// Records into the allocation stats the number of ro-region bytes allocated
// while this mark was in scope (_oldtop is saved at construction time).
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
68
// Starts with an empty embedded-pointer bitmap and a pre-sized list of
// source objects; _total_bytes tracks the cumulative size of appended objects.
ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
  _total_bytes = 0;
  _objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
}
143
144 RelocateEmbeddedPointers relocator(builder, src_info->buffered_addr(), start);
145 _ptrmap.iterate(&relocator, start, end);
146 }
147
// Sets up an empty builder and registers it as the process-wide singleton.
// All region/address fields start at nullptr and are filled in later
// (e.g. by reserve_buffer() and the relocation phases).
ArchiveBuilder::ArchiveBuilder() :
  _current_dump_region(nullptr),
  _buffer_bottom(nullptr),
  _last_verified_top(nullptr),
  _num_dump_regions_used(0),
  _other_region_used_bytes(0),
  _requested_static_archive_bottom(nullptr),
  _requested_static_archive_top(nullptr),
  _requested_dynamic_archive_bottom(nullptr),
  _requested_dynamic_archive_top(nullptr),
  _mapped_static_archive_bottom(nullptr),
  _mapped_static_archive_top(nullptr),
  _buffer_to_requested_delta(0),
  _rw_region("rw", MAX_SHARED_DELTA),
  _ro_region("ro", MAX_SHARED_DELTA),
  _ptrmap(mtClassShared),
  _rw_ptrmap(mtClassShared),
  _ro_ptrmap(mtClassShared),
  _rw_src_objs(),
  _ro_src_objs(),
  _src_obj_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _buffered_to_src_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _total_heap_region_size(0),
  _estimated_metaspaceobj_bytes(0),
  _estimated_hashtable_bytes(0)
{
  _klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
  _symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
  // Fixed seed so that any entropy-based decisions are deterministic across dumps.
  _entropy_seed = 0x12345678;
  assert(_current == nullptr, "must be");
  _current = this;
}
180
181 ArchiveBuilder::~ArchiveBuilder() {
182 assert(_current == this, "must be");
183 _current = nullptr;
184
185 for (int i = 0; i < _symbols->length(); i++) {
209 public:
210 GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}
211
212 virtual bool do_unique_ref(Ref* ref, bool read_only) {
213 return _builder->gather_klass_and_symbol(ref, read_only);
214 }
215 };
216
// Visits one metaspace object reference during the gathering pass.
// Collects Klasses (unless excluded) and Symbols into _klasses/_symbols, and
// accumulates a size estimate for the archive. Returns true to make the
// closure recurse into the object's own embedded references.
bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
  if (ref->obj() == nullptr) {
    return false;
  }
  if (get_follow_mode(ref) != make_a_copy) {
    // Objects that are pointed-to in place or set to null are not copied,
    // so there is nothing to gather or estimate for them.
    return false;
  }
  if (ref->msotype() == MetaspaceObj::ClassType) {
    Klass* klass = (Klass*)ref->obj();
    assert(klass->is_klass(), "must be");
    if (!is_excluded(klass)) {
      _klasses->append(klass);
    }
    // See RunTimeClassInfo::get_for()
    _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);
  } else if (ref->msotype() == MetaspaceObj::SymbolType) {
    // Make sure the symbol won't be GC'ed while we are dumping the archive.
    Symbol* sym = (Symbol*)ref->obj();
    sym->increment_refcount();
    _symbols->append(sym);
  }

  // Every copied object contributes its aligned size to the estimate.
  int bytes = ref->size() * BytesPerWord;
  _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);

  return true; // recurse
}
244
245 void ArchiveBuilder::gather_klasses_and_symbols() {
246 ResourceMark rm;
247 log_info(cds)("Gathering classes and symbols ... ");
248 GatherKlassesAndSymbols doit(this);
267 // During -Xshare:dump, the order of Symbol creation is strictly determined by
268 // the SharedClassListFile (class loading is done in a single thread and the JIT
269 // is disabled). Also, Symbols are allocated in monotonically increasing addresses
270 // (see Symbol::operator new(size_t, int)). So if we iterate the Symbols by
271 // ascending address order, we ensure that all Symbols are copied into deterministic
272 // locations in the archive.
273 //
274 // TODO: in the future, if we want to produce deterministic contents in the
275 // dynamic archive, we might need to sort the symbols alphabetically (also see
276 // DynamicArchiveBuilder::sort_methods()).
277 log_info(cds)("Sorting symbols ... ");
278 _symbols->sort(compare_symbols_by_address);
279 sort_klasses();
280
281 // TODO -- we need a proper estimate for the archived modules, etc,
282 // but this should be enough for now
283 _estimated_metaspaceobj_bytes += 200 * 1024 * 1024;
284 }
285 }
286
287 int ArchiveBuilder::compare_symbols_by_address(Symbol** a, Symbol** b) {
288 if (a[0] < b[0]) {
289 return -1;
290 } else {
291 assert(a[0] > b[0], "Duplicated symbol %s unexpected", (*a)->as_C_string());
292 return 1;
293 }
294 }
295
296 int ArchiveBuilder::compare_klass_by_name(Klass** a, Klass** b) {
297 return a[0]->name()->fast_compare(b[0]->name());
298 }
299
// Sorts _klasses by name so classes are laid out in a deterministic order.
void ArchiveBuilder::sort_klasses() {
  log_info(cds)("Sorting classes ... ");
  _klasses->sort(compare_klass_by_name);
}
304
// Estimates how many bytes to reserve for the output buffer, based on the
// gathered metaspace objects plus the shared tables. The result is aligned
// to the core region alignment. This is an upper-bound estimate; unused
// space is trimmed when the regions are packed.
size_t ArchiveBuilder::estimate_archive_size() {
  // size of the symbol table and two dictionaries, plus the RunTimeClassInfo's
  size_t symbol_table_est = SymbolTable::estimate_size_for_archive();
  size_t dictionary_est = SystemDictionaryShared::estimate_size_for_archive();
  _estimated_hashtable_bytes = symbol_table_est + dictionary_est;

  size_t total = 0;

  total += _estimated_metaspaceobj_bytes;
  total += _estimated_hashtable_bytes;

  // allow fragmentation at the end of each dump region
  total += _total_dump_regions * MetaspaceShared::core_region_alignment();

  log_info(cds)("_estimated_hashtable_bytes = " SIZE_FORMAT " + " SIZE_FORMAT " = " SIZE_FORMAT,
                symbol_table_est, dictionary_est, _estimated_hashtable_bytes);
  log_info(cds)("_estimated_metaspaceobj_bytes = " SIZE_FORMAT, _estimated_metaspaceobj_bytes);
  log_info(cds)("total estimate bytes = " SIZE_FORMAT, total);

  return align_up(total, MetaspaceShared::core_region_alignment());
}
326
327 address ArchiveBuilder::reserve_buffer() {
328 size_t buffer_size = estimate_archive_size();
329 ReservedSpace rs(buffer_size, MetaspaceShared::core_region_alignment(), os::vm_page_size());
330 if (!rs.is_reserved()) {
404
405 iterate_roots(it);
406 }
407
// MetaspaceClosure that forwards every visited metaspace object reference
// to ArchiveBuilder::gather_one_source_obj() (see gather_source_objs()).
class GatherSortedSourceObjs : public MetaspaceClosure {
  ArchiveBuilder* _builder;

public:
  GatherSortedSourceObjs(ArchiveBuilder* builder) : _builder(builder) {}

  virtual bool do_ref(Ref* ref, bool read_only) {
    return _builder->gather_one_source_obj(ref, read_only);
  }
};
418
419 bool ArchiveBuilder::gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only) {
420 address src_obj = ref->obj();
421 if (src_obj == nullptr) {
422 return false;
423 }
424 if (RegeneratedClasses::has_been_regenerated(src_obj)) {
425 // No need to copy it. We will later relocate it to point to the regenerated klass/method.
426 return false;
427 }
428 remember_embedded_pointer_in_enclosing_obj(ref);
429
430 FollowMode follow_mode = get_follow_mode(ref);
431 SourceObjInfo src_info(ref, read_only, follow_mode);
432 bool created;
433 SourceObjInfo* p = _src_obj_table.put_if_absent(src_obj, src_info, &created);
434 if (created) {
435 if (_src_obj_table.maybe_grow()) {
436 log_info(cds, hashtables)("Expanded _src_obj_table table to %d", _src_obj_table.table_size());
437 }
438 }
439
440 #ifdef ASSERT
441 if (ref->msotype() == MetaspaceObj::MethodType) {
442 Method* m = (Method*)ref->obj();
443 assert(!RegeneratedClasses::has_been_regenerated((address)m->method_holder()),
444 "Should not archive methods in a class that has been regenerated");
445 }
446 #endif
447
448 assert(p->read_only() == src_info.read_only(), "must be");
508 _rw_src_objs.remember_embedded_pointer(src_info, ref);
509 }
510 }
511 }
512
// Two-pass gathering: first collect klasses and symbols (which also sorts
// them for determinism), then walk the sorted roots to record every
// archivable object in the source-object tables.
void ArchiveBuilder::gather_source_objs() {
  ResourceMark rm;
  log_info(cds)("Gathering all archivable objects ... ");
  gather_klasses_and_symbols();
  GatherSortedSourceObjs doit(this);
  iterate_sorted_roots(&doit);
  doit.finish();
}
521
522 bool ArchiveBuilder::is_excluded(Klass* klass) {
523 if (klass->is_instance_klass()) {
524 InstanceKlass* ik = InstanceKlass::cast(klass);
525 return SystemDictionaryShared::is_excluded_class(ik);
526 } else if (klass->is_objArray_klass()) {
527 Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
528 if (MetaspaceShared::is_shared_static(bottom)) {
529 // The bottom class is in the static archive so it's clearly not excluded.
530 assert(CDSConfig::is_dumping_dynamic_archive(), "sanity");
531 return false;
532 } else if (bottom->is_instance_klass()) {
533 return SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(bottom));
534 }
535 }
536
537 return false;
538 }
539
540 ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
541 address obj = ref->obj();
542 if (MetaspaceShared::is_in_shared_metaspace(obj)) {
543 // Don't dump existing shared metadata again.
544 return point_to_it;
545 } else if (ref->msotype() == MetaspaceObj::MethodDataType ||
546 ref->msotype() == MetaspaceObj::MethodCountersType) {
547 return set_to_null;
548 } else {
549 if (ref->msotype() == MetaspaceObj::ClassType) {
550 Klass* klass = (Klass*)ref->obj();
551 assert(klass->is_klass(), "must be");
552 if (is_excluded(klass)) {
553 ResourceMark rm;
554 log_debug(cds, dynamic)("Skipping class (excluded): %s", klass->external_name());
555 return set_to_null;
556 }
557 }
558
559 return make_a_copy;
560 }
561 }
562
563 void ArchiveBuilder::start_dump_region(DumpRegion* next) {
564 address bottom = _last_verified_top;
565 address top = (address)(current_dump_region()->top());
566 _other_region_used_bytes += size_t(top - bottom);
567
700
701 log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(src), p2i(dest), bytes);
702 src_info->set_buffered_addr((address)dest);
703
704 _alloc_stats.record(src_info->msotype(), int(newtop - oldtop), src_info->read_only());
705 }
706
707 // This is used by code that hand-assembles data structures, such as the LambdaProxyClassKey, that are
708 // not handled by MetaspaceClosure.
709 void ArchiveBuilder::write_pointer_in_buffer(address* ptr_location, address src_addr) {
710 assert(is_in_buffer_space(ptr_location), "must be");
711 if (src_addr == nullptr) {
712 *ptr_location = nullptr;
713 ArchivePtrMarker::clear_pointer(ptr_location);
714 } else {
715 *ptr_location = get_buffered_addr(src_addr);
716 ArchivePtrMarker::mark_pointer(ptr_location);
717 }
718 }
719
// Maps a source (dump-time) address to the address of its copy inside the
// archive buffer. The object must already have been archived.
address ArchiveBuilder::get_buffered_addr(address src_addr) const {
  SourceObjInfo* p = _src_obj_table.get(src_addr);
  assert(p != nullptr, "src_addr " INTPTR_FORMAT " is used but has not been archived",
         p2i(src_addr));

  return p->buffered_addr();
}
727
// Inverse of get_buffered_addr(): maps a buffered copy back to the original
// source object's address.
address ArchiveBuilder::get_source_addr(address buffered_addr) const {
  assert(is_in_buffer_space(buffered_addr), "must be");
  address* src_p = _buffered_to_src_table.get(buffered_addr);
  assert(src_p != nullptr && *src_p != nullptr, "must be");
  return *src_p;
}
734
735 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
736 for (int i = 0; i < src_objs->objs()->length(); i++) {
737 src_objs->relocate(i, this);
738 }
739 }
740
// Relocates embedded pointers in both core regions (rw first, then ro).
void ArchiveBuilder::relocate_metaspaceobj_embedded_pointers() {
  log_info(cds)("Relocating embedded pointers in core regions ... ");
  relocate_embedded_pointers(&_rw_src_objs);
  relocate_embedded_pointers(&_ro_src_objs);
}
746
// Strips run-time-only state from every buffered Klass so the copies can be
// shared across JVM instances: removes java mirrors, rewrites bytecodes, and
// calls remove_unshareable_info(). Also tallies per-category class counts and
// logs a summary.
void ArchiveBuilder::make_klasses_shareable() {
  int num_instance_klasses = 0;
  int num_boot_klasses = 0;
  int num_platform_klasses = 0;
  int num_app_klasses = 0;
  int num_hidden_klasses = 0;
  int num_unlinked_klasses = 0;
  int num_unregistered_klasses = 0;
  int num_obj_array_klasses = 0;
  int num_type_array_klasses = 0;

  for (int i = 0; i < klasses()->length(); i++) {
    const char* type;
    const char* unlinked = "";
    const char* hidden = "";
    const char* generated = "";
    // Operate on the buffered copy, not the source Klass.
    Klass* k = get_buffered_addr(klasses()->at(i));
    k->remove_java_mirror();
    if (k->is_objArray_klass()) {
      // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
      // on their array classes.
      num_obj_array_klasses ++;
      type = "array";
    } else if (k->is_typeArray_klass()) {
      num_type_array_klasses ++;
      type = "array";
      k->remove_unshareable_info();
    } else {
      assert(k->is_instance_klass(), " must be");
      num_instance_klasses ++;
      InstanceKlass* ik = InstanceKlass::cast(k);
      // Classify by defining loader for the summary log.
      if (ik->is_shared_boot_class()) {
        type = "boot";
        num_boot_klasses ++;
      } else if (ik->is_shared_platform_class()) {
        type = "plat";
        num_platform_klasses ++;
      } else if (ik->is_shared_app_class()) {
        type = "app";
        num_app_klasses ++;
      } else {
        assert(ik->is_shared_unregistered_class(), "must be");
        type = "unreg";
        num_unregistered_klasses ++;
      }

      if (!ik->is_linked()) {
        num_unlinked_klasses ++;
        unlinked = " ** unlinked";
      }

      if (ik->is_hidden()) {
        num_hidden_klasses ++;
        hidden = " ** hidden";
      }

      if (ik->is_generated_shared_class()) {
        generated = " ** generated";
      }
      MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread::current(), ik);
      ik->remove_unshareable_info();
    }

    if (log_is_enabled(Debug, cds, class)) {
      ResourceMark rm;
      log_debug(cds, class)("klasses[%5d] = " PTR_FORMAT " %-5s %s%s%s%s", i,
                            p2i(to_requested(k)), type, k->external_name(),
                            hidden, unlinked, generated);
    }
  }

  log_info(cds)("Number of classes %d", num_instance_klasses + num_obj_array_klasses + num_type_array_klasses);
  log_info(cds)("    instance classes   = %5d", num_instance_klasses);
  log_info(cds)("      boot             = %5d", num_boot_klasses);
  log_info(cds)("      app              = %5d", num_app_klasses);
  log_info(cds)("      platform         = %5d", num_platform_klasses);
  log_info(cds)("      unregistered     = %5d", num_unregistered_klasses);
  log_info(cds)("      (hidden)         = %5d", num_hidden_klasses);
  log_info(cds)("      (unlinked)       = %5d", num_unlinked_klasses);
  log_info(cds)("    obj array classes  = %5d", num_obj_array_klasses);
  log_info(cds)("    type array classes = %5d", num_type_array_klasses);
  log_info(cds)("               symbols = %5d", _symbols->length());

  DynamicArchive::make_array_klasses_shareable();
}
832
// Serializes the table headers that a dynamic archive needs at run time.
void ArchiveBuilder::serialize_dynamic_archivable_items(SerializeClosure* soc) {
  SymbolTable::serialize_shared_table_header(soc, false);
  SystemDictionaryShared::serialize_dictionary_headers(soc, false);
  DynamicArchive::serialize_array_klasses(soc);
}
838
839 uintx ArchiveBuilder::buffer_to_offset(address p) const {
840 address requested_p = to_requested(p);
841 assert(requested_p >= _requested_static_archive_bottom, "must be");
842 return requested_p - _requested_static_archive_bottom;
843 }
844
// Converts any of the three address kinds (mapped static archive, buffer, or
// source) into an archive offset. Source addresses are first translated to
// their buffered copies.
uintx ArchiveBuilder::any_to_offset(address p) const {
  if (is_in_mapped_static_archive(p)) {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    return p - _mapped_static_archive_bottom;
  }
  if (!is_in_buffer_space(p)) {
    // p must be a "source" address
    p = get_buffered_addr(p);
  }
  return buffer_to_offset(p);
}
856
857 #if INCLUDE_CDS_JAVA_HEAP
// Computes the narrowKlass value that |k| will have at run time, by encoding
// its requested address against the requested archive bottom (which equals
// the runtime narrow-klass encoding base) with the precomputed shift.
narrowKlass ArchiveBuilder::get_requested_narrow_klass(Klass* k) {
  assert(CDSConfig::is_dumping_heap(), "sanity");
  k = get_buffered_klass(k);
  Klass* requested_k = to_requested(k);
  address narrow_klass_base = _requested_static_archive_bottom; // runtime encoding base == runtime mapping start
  const int narrow_klass_shift = ArchiveHeapWriter::precomputed_narrow_klass_shift;
  return CompressedKlassPointers::encode_not_null(requested_k, narrow_klass_base, narrow_klass_shift);
}
866 #endif // INCLUDE_CDS_JAVA_HEAP
867
868 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
869 // so that the archive can be mapped to the "requested" location without runtime relocation.
870 //
871 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
872 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
873 // - Every pointer must have one of the following values:
874 // [a] nullptr:
875 // No relocation is needed. Remove this pointer from ptrmap so we don't need to
876 // consider it at runtime.
926 } else {
927 assert(_builder->is_in_mapped_static_archive(*p), "old pointer must point inside buffer space or mapped static archive");
928 *p += _mapped_to_requested_static_archive_delta;
929 assert(_builder->is_in_requested_static_archive(*p), "new pointer must point inside requested archive");
930 }
931 }
932 _max_non_null_offset = offset;
933 }
934
935 return true; // keep iterating
936 }
937
938 void doit() {
939 ArchivePtrMarker::ptrmap()->iterate(this);
940 ArchivePtrMarker::compact(_max_non_null_offset);
941 }
942 };
943
944
// Packs the ro region and rewrites every marked pointer so the archive can be
// mapped at its requested base without further runtime relocation. The
// STATIC_DUMP template parameter selects static vs. dynamic patching rules.
void ArchiveBuilder::relocate_to_requested() {
  ro_region()->pack();

  size_t my_archive_size = buffer_top() - buffer_bottom();

  if (CDSConfig::is_dumping_static_archive()) {
    _requested_static_archive_top = _requested_static_archive_bottom + my_archive_size;
    RelocateBufferToRequested<true> patcher(this);
    patcher.doit();
  } else {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    _requested_dynamic_archive_top = _requested_dynamic_archive_bottom + my_archive_size;
    RelocateBufferToRequested<false> patcher(this);
    patcher.doit();
  }
}
961
962 // Write detailed info to a mapfile to analyze contents of the archive.
963 // static dump:
964 // java -Xshare:dump -Xlog:cds+map=trace:file=cds.map:none:filesize=0
965 // dynamic dump:
966 // java -cp MyApp.jar -XX:ArchiveClassesAtExit=MyApp.jsa \
1074 base = requested_base;
1075 top = requested_base + size;
1076 log_info(cds, map)("[%-18s " PTR_FORMAT " - " PTR_FORMAT " " SIZE_FORMAT_W(9) " bytes]",
1077 name, p2i(base), p2i(top), size);
1078 }
1079
1080 #if INCLUDE_CDS_JAVA_HEAP
// Logs every object in the archived heap buffer, walking the buffer linearly.
// Each position is one of: a regular archived oop, the specially-copied
// HeapShared::roots() segment, or a filler; anything else is a bug.
static void log_heap_region(ArchiveHeapInfo* heap_info) {
  MemRegion r = heap_info->buffer_region();
  address start = address(r.start()); // start of the current oop inside the buffer
  address end = address(r.end());
  log_region("heap", start, end, ArchiveHeapWriter::buffered_addr_to_requested_addr(start));

  LogStreamHandle(Info, cds, map) st;

  while (start < end) {
    size_t byte_size;
    oop source_oop = ArchiveHeapWriter::buffered_addr_to_source_obj(start);
    address requested_start = ArchiveHeapWriter::buffered_addr_to_requested_addr(start);
    st.print(PTR_FORMAT ": @@ Object ", p2i(requested_start));

    if (source_oop != nullptr) {
      // This is a regular oop that got archived.
      print_oop_with_requested_addr_cr(&st, source_oop, false);
      byte_size = source_oop->size() * BytesPerWord;
    } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
      // HeapShared::roots() is copied specially, so it doesn't exist in
      // ArchiveHeapWriter::BufferOffsetToSourceObjectTable.
      // See ArchiveHeapWriter::copy_roots_to_buffer().
      st.print_cr("HeapShared::roots[%d]", HeapShared::pending_roots()->length());
      byte_size = ArchiveHeapWriter::heap_roots_word_size() * BytesPerWord;
    } else if ((byte_size = ArchiveHeapWriter::get_filler_size_at(start)) > 0) {
      // We have a filler oop, which also does not exist in BufferOffsetToSourceObjectTable.
      st.print_cr("filler " SIZE_FORMAT " bytes", byte_size);
    } else {
      ShouldNotReachHere();
    }

    address oop_end = start + byte_size;
    log_as_hex(start, oop_end, requested_start, /*is_heap=*/true);

    if (source_oop != nullptr) {
      log_oop_details(heap_info, source_oop, /*buffered_addr=*/start);
    } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
      log_heap_roots();
    }
    // Advance to the next object in the buffer.
    start = oop_end;
  }
}
1123
1124 // ArchivedFieldPrinter is used to print the fields of archived objects. We can't
1125 // use _source_obj->print_on(), because we want to print the oop fields
1126 // in _source_obj with their requested addresses using print_oop_with_requested_addr_cr().
1127 class ArchivedFieldPrinter : public FieldClosure {
1128 ArchiveHeapInfo* _heap_info;
1129 outputStream* _st;
1130 oop _source_obj;
1131 address _buffered_addr;
1132 public:
1133 ArchivedFieldPrinter(ArchiveHeapInfo* heap_info, outputStream* st, oop src_obj, address buffered_addr) :
1134 _heap_info(heap_info), _st(st), _source_obj(src_obj), _buffered_addr(buffered_addr) {}
1135
1136 void do_field(fieldDescriptor* fd) {
1137 _st->print(" - ");
1138 BasicType ft = fd->field_type();
1199 print_oop_with_requested_addr_cr(&st, source_obj_array->obj_at(i));
1200 }
1201 } else {
1202 st.print_cr(" - fields (" SIZE_FORMAT " words):", source_oop->size());
1203 ArchivedFieldPrinter print_field(heap_info, &st, source_oop, buffered_addr);
1204 InstanceKlass::cast(source_klass)->print_nonstatic_fields(&print_field);
1205 }
1206 }
1207 }
1208
1209 static void log_heap_roots() {
1210 LogStreamHandle(Trace, cds, map, oops) st;
1211 if (st.is_enabled()) {
1212 for (int i = 0; i < HeapShared::pending_roots()->length(); i++) {
1213 st.print("roots[%4d]: ", i);
1214 print_oop_with_requested_addr_cr(&st, HeapShared::pending_roots()->at(i));
1215 }
1216 }
1217 }
1218
// The output looks like this. The first number is the requested address. The second number is
// the narrowOop version of the requested address.
//     0x00000007ffc7e840 (0xfff8fd08) java.lang.Class
//     0x00000007ffc000f8 (0xfff8001f) [B length: 11
static void print_oop_with_requested_addr_cr(outputStream* st, oop source_oop, bool print_addr = true) {
  if (source_oop == nullptr) {
    st->print_cr("null");
  } else {
    // external_name() allocates in the resource area.
    ResourceMark rm;
    oop requested_obj = ArchiveHeapWriter::source_obj_to_requested_obj(source_oop);
    if (print_addr) {
      st->print(PTR_FORMAT " ", p2i(requested_obj));
    }
    if (UseCompressedOops) {
      st->print("(0x%08x) ", CompressedOops::narrow_oop_value(requested_obj));
    }
    // Arrays additionally get their length printed.
    if (source_oop->is_array()) {
      int array_len = arrayOop(source_oop)->length();
      st->print_cr("%s length: %d", source_oop->klass()->external_name(), array_len);
    } else {
      st->print_cr("%s", source_oop->klass()->external_name());
    }
  }
}
1243 #endif // INCLUDE_CDS_JAVA_HEAP
1244
1245 // Log all the data [base...top). Pretend that the base address
1246 // will be mapped to requested_base at run-time.
1247 static void log_as_hex(address base, address top, address requested_base, bool is_heap = false) {
1248 assert(top >= base, "must be");
1249
1250 LogStreamHandle(Trace, cds, map) lsh;
1251 if (lsh.is_enabled()) {
1252 int unitsize = sizeof(address);
1253 if (is_heap && UseCompressedOops) {
1254 // This makes the compressed oop pointers easier to read, but
1255 // longs and doubles will be split into two words.
1256 unitsize = sizeof(narrowOop);
1257 }
1258 os::print_hex_dump(&lsh, base, top, unitsize, 32, requested_base);
1259 }
1292 if (heap_info->is_used()) {
1293 log_heap_region(heap_info);
1294 }
1295 #endif
1296
1297 log_info(cds, map)("[End of CDS archive map]");
1298 }
1299 }; // end ArchiveBuilder::CDSMapLogger
1300
// Prints the per-type allocation statistics for the ro and rw regions.
void ArchiveBuilder::print_stats() {
  _alloc_stats.print_stats(int(_ro_region.used()), int(_rw_region.used()));
}
1304
1305 void ArchiveBuilder::write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info) {
1306 // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
1307 // MetaspaceShared::n_regions (internal to hotspot).
1308 assert(NUM_CDS_REGIONS == MetaspaceShared::n_regions, "sanity");
1309
1310 write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
1311 write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);
1312
1313 // Split pointer map into read-write and read-only bitmaps
1314 ArchivePtrMarker::initialize_rw_ro_maps(&_rw_ptrmap, &_ro_ptrmap);
1315
1316 size_t bitmap_size_in_bytes;
1317 char* bitmap = mapinfo->write_bitmap_region(ArchivePtrMarker::rw_ptrmap(), ArchivePtrMarker::ro_ptrmap(), heap_info,
1318 bitmap_size_in_bytes);
1319
1320 if (heap_info->is_used()) {
1321 _total_heap_region_size = mapinfo->write_heap_region(heap_info);
1322 }
1323
1324 print_region_stats(mapinfo, heap_info);
1325
1326 mapinfo->set_requested_base((char*)MetaspaceShared::requested_base_address());
1327 mapinfo->set_header_crc(mapinfo->compute_header_crc());
1328 // After this point, we should not write any data into mapinfo->header() since this
1329 // would corrupt its checksum we have calculated before.
1330 mapinfo->write_header();
1331 mapinfo->close();
1332
1333 if (log_is_enabled(Info, cds)) {
1334 print_stats();
1335 }
1336
1337 if (log_is_enabled(Info, cds, map)) {
1343 }
1344
// Writes one dump region into the archive file via the FileMapInfo.
void ArchiveBuilder::write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region, bool read_only,  bool allow_exec) {
  mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
}
1348
// Prints reserved/used statistics for every region in the archive (rw, ro,
// bitmap, and — if present — the heap region), followed by a grand total.
void ArchiveBuilder::print_region_stats(FileMapInfo *mapinfo, ArchiveHeapInfo* heap_info) {
  // Print statistics of all the regions
  const size_t bitmap_used = mapinfo->region_at(MetaspaceShared::bm)->used();
  const size_t bitmap_reserved = mapinfo->region_at(MetaspaceShared::bm)->used_aligned();
  const size_t total_reserved = _ro_region.reserved()  + _rw_region.reserved() +
                                bitmap_reserved +
                                _total_heap_region_size;
  const size_t total_bytes = _ro_region.used()  + _rw_region.used() +
                             bitmap_used +
                             _total_heap_region_size;
  const double total_u_perc = percent_of(total_bytes, total_reserved);

  _rw_region.print(total_reserved);
  _ro_region.print(total_reserved);

  print_bitmap_region_stats(bitmap_used, total_reserved);

  if (heap_info->is_used()) {
    print_heap_region_stats(heap_info, total_reserved);
  }

  log_debug(cds)("total   : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
                 total_bytes, total_reserved, total_u_perc);
}
1373
1374 void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
1375 log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used]",
1376 size, size/double(total_size)*100.0, size);
1377 }
1378
1379 void ArchiveBuilder::print_heap_region_stats(ArchiveHeapInfo *info, size_t total_size) {
1380 char* start = info->buffer_start();
1381 size_t size = info->buffer_byte_size();
1382 char* top = start + size;
1383 log_debug(cds)("hp space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used] at " INTPTR_FORMAT,
1384 size, size/double(total_size)*100.0, size, p2i(start));
1385 }
1386
// Reports that a dump region ran out of space and aborts the dump.
// Does not return: ends in MetaspaceShared::unrecoverable_writing_error().
void ArchiveBuilder::report_out_of_space(const char* name, size_t needed_bytes) {
  // This is highly unlikely to happen on 64-bits because we have reserved a 4GB space.
  // On 32-bit we reserve only 256MB so you could run out of space with 100,000 classes
  // or so.
  _rw_region.print_out_of_space_msg(name, needed_bytes);
  _ro_region.print_out_of_space_msg(name, needed_bytes);

  log_error(cds)("Unable to allocate from '%s' region: Please reduce the number of shared classes.", name);
  MetaspaceShared::unrecoverable_writing_error();
}
1397
1398
1399 #ifndef PRODUCT
// Debug-only check that the caller is running on the VMThread.
void ArchiveBuilder::assert_is_vm_thread() {
  assert(Thread::current()->is_VM_thread(), "ArchiveBuilder should be used only inside the VMThread");
}
1403 #endif
|
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "cds/archiveBuilder.hpp"
27 #include "cds/archiveHeapWriter.hpp"
28 #include "cds/archiveUtils.hpp"
29 #include "cds/cdsConfig.hpp"
30 #include "cds/classPrelinker.hpp"
31 #include "cds/classPreloader.hpp"
32 #include "cds/cppVtables.hpp"
33 #include "cds/dumpAllocStats.hpp"
34 #include "cds/dynamicArchive.hpp"
35 #include "cds/finalImageRecipes.hpp"
36 #include "cds/heapShared.hpp"
37 #include "cds/metaspaceShared.hpp"
38 #include "cds/regeneratedClasses.hpp"
39 #include "classfile/classLoader.hpp"
40 #include "classfile/classLoaderDataShared.hpp"
41 #include "classfile/javaClasses.hpp"
42 #include "classfile/symbolTable.hpp"
43 #include "classfile/systemDictionaryShared.hpp"
44 #include "classfile/vmClasses.hpp"
45 #include "interpreter/abstractInterpreter.hpp"
46 #include "jvm.h"
47 #include "logging/log.hpp"
48 #include "logging/logStream.hpp"
49 #include "memory/allStatic.hpp"
50 #include "memory/memRegion.hpp"
51 #include "memory/resourceArea.hpp"
52 #include "oops/compressedKlass.inline.hpp"
53 #include "oops/instanceKlass.hpp"
54 #include "oops/objArrayKlass.hpp"
55 #include "oops/objArrayOop.inline.hpp"
56 #include "oops/oopHandle.inline.hpp"
57 #include "oops/trainingData.hpp"
58 #include "runtime/arguments.hpp"
59 #include "runtime/fieldDescriptor.inline.hpp"
60 #include "runtime/globals_extension.hpp"
61 #include "runtime/javaThread.hpp"
62 #include "runtime/sharedRuntime.hpp"
63 #include "utilities/align.hpp"
64 #include "utilities/bitMap.inline.hpp"
65 #include "utilities/formatBuffer.hpp"
66
// Singleton: the ArchiveBuilder for the dump currently in progress.
// Set in the constructor, cleared in the destructor.
ArchiveBuilder* ArchiveBuilder::_current = nullptr;
68
69 ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
70 char* newtop = ArchiveBuilder::current()->_ro_region.top();
71 ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
72 }
73
74 ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
75 _total_bytes = 0;
76 _objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
77 }
148
149 RelocateEmbeddedPointers relocator(builder, src_info->buffered_addr(), start);
150 _ptrmap.iterate(&relocator, start, end);
151 }
152
// Construct an empty builder and register it as the global "current" builder.
// The dump regions (rw/ro/cc) and pointer bitmaps start out empty; the actual
// dump buffer is reserved later (see reserve_buffer()).
ArchiveBuilder::ArchiveBuilder() :
  _current_dump_region(nullptr),
  _buffer_bottom(nullptr),
  _last_verified_top(nullptr),
  _num_dump_regions_used(0),
  _other_region_used_bytes(0),
  _requested_static_archive_bottom(nullptr),
  _requested_static_archive_top(nullptr),
  _requested_dynamic_archive_bottom(nullptr),
  _requested_dynamic_archive_top(nullptr),
  _mapped_static_archive_bottom(nullptr),
  _mapped_static_archive_top(nullptr),
  _buffer_to_requested_delta(0),
  _rw_region("rw", MAX_SHARED_DELTA),
  _ro_region("ro", MAX_SHARED_DELTA),
  _cc_region("cc", MAX_SHARED_DELTA),
  _ptrmap(mtClassShared),
  _rw_ptrmap(mtClassShared),
  _ro_ptrmap(mtClassShared),
  _cc_ptrmap(mtClassShared),
  _rw_src_objs(),
  _ro_src_objs(),
  _src_obj_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _buffered_to_src_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _total_heap_region_size(0),
  _estimated_metaspaceobj_bytes(0),
  _estimated_hashtable_bytes(0)
{
  _klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
  _symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
  // Arbitrary fixed seed, so repeated dumps start from the same state.
  _entropy_seed = 0x12345678;
  // Only one ArchiveBuilder may be active at a time.
  assert(_current == nullptr, "must be");
  _current = this;
}
187
188 ArchiveBuilder::~ArchiveBuilder() {
189 assert(_current == this, "must be");
190 _current = nullptr;
191
192 for (int i = 0; i < _symbols->length(); i++) {
216 public:
217 GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}
218
219 virtual bool do_unique_ref(Ref* ref, bool read_only) {
220 return _builder->gather_klass_and_symbol(ref, read_only);
221 }
222 };
223
// MetaspaceClosure callback for the first gathering pass: collect every
// archivable Klass into _klasses and every Symbol into _symbols, while
// accumulating a size estimate of all objects to be copied.
// Returns true to make the closure recurse into the object's own references.
bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
  if (ref->obj() == nullptr) {
    return false;
  }
  if (get_follow_mode(ref) != make_a_copy) {
    // Objects that are pointed-to in place or nulled out are not copied.
    return false;
  }
  if (ref->msotype() == MetaspaceObj::ClassType) {
    Klass* klass = (Klass*)ref->obj();
    assert(klass->is_klass(), "must be");
    if (!is_excluded(klass)) {
      _klasses->append(klass);
      if (klass->is_hidden() && klass->is_instance_klass()) {
        // Hidden classes need their loader type fixed up before archiving.
        update_hidden_class_loader_type(InstanceKlass::cast(klass));
      }
    }
    // Extra word per class for its runtime lookup record.
    // See RunTimeClassInfo::get_for()
    _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);
  } else if (ref->msotype() == MetaspaceObj::SymbolType) {
    // Make sure the symbol won't be GC'ed while we are dumping the archive.
    Symbol* sym = (Symbol*)ref->obj();
    sym->increment_refcount();
    _symbols->append(sym);
  }

  int bytes = ref->size() * BytesPerWord;
  _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);

  return true; // recurse
}
254
255 void ArchiveBuilder::gather_klasses_and_symbols() {
256 ResourceMark rm;
257 log_info(cds)("Gathering classes and symbols ... ");
258 GatherKlassesAndSymbols doit(this);
277 // During -Xshare:dump, the order of Symbol creation is strictly determined by
278 // the SharedClassListFile (class loading is done in a single thread and the JIT
279 // is disabled). Also, Symbols are allocated in monotonically increasing addresses
280 // (see Symbol::operator new(size_t, int)). So if we iterate the Symbols by
281 // ascending address order, we ensure that all Symbols are copied into deterministic
282 // locations in the archive.
283 //
284 // TODO: in the future, if we want to produce deterministic contents in the
285 // dynamic archive, we might need to sort the symbols alphabetically (also see
286 // DynamicArchiveBuilder::sort_methods()).
287 log_info(cds)("Sorting symbols ... ");
288 _symbols->sort(compare_symbols_by_address);
289 sort_klasses();
290
291 // TODO -- we need a proper estimate for the archived modules, etc,
292 // but this should be enough for now
293 _estimated_metaspaceobj_bytes += 200 * 1024 * 1024;
294 }
295 }
296
297 #if INCLUDE_CDS_JAVA_HEAP
298
299 void ArchiveBuilder::update_hidden_class_loader_type(InstanceKlass* ik) {
300 s2 classloader_type;
301 if (HeapShared::is_lambda_form_klass(ik)) {
302 assert(CDSConfig::is_dumping_invokedynamic(), "lambda form classes are archived only if ArchiveInvokeDynamic is true");
303 classloader_type = ClassLoader::BOOT_LOADER;
304 } else if (HeapShared::is_lambda_proxy_klass(ik)) {
305 oop loader = ik->class_loader();
306
307 if (loader == nullptr) {
308 classloader_type = ClassLoader::BOOT_LOADER;
309 } else if (SystemDictionary::is_platform_class_loader(loader)) {
310 classloader_type = ClassLoader::PLATFORM_LOADER;
311 } else if (SystemDictionary::is_system_class_loader(loader)) {
312 classloader_type = ClassLoader::APP_LOADER;
313 } else {
314 ShouldNotReachHere();
315 }
316 } else {
317 ShouldNotReachHere();
318 }
319
320 ik->set_shared_class_loader_type(classloader_type);
321 if (HeapShared::is_lambda_proxy_klass(ik)) {
322 InstanceKlass* nest_host = ik->nest_host_not_null();
323 ik->set_shared_classpath_index(nest_host->shared_classpath_index());
324 }
325 }
326
327 #endif //INCLUDE_CDS_JAVA_HEAP
328
329 int ArchiveBuilder::compare_symbols_by_address(Symbol** a, Symbol** b) {
330 if (a[0] < b[0]) {
331 return -1;
332 } else {
333 assert(a[0] > b[0], "Duplicated symbol %s unexpected", (*a)->as_C_string());
334 return 1;
335 }
336 }
337
338 int ArchiveBuilder::compare_klass_by_name(Klass** a, Klass** b) {
339 return a[0]->name()->fast_compare(b[0]->name());
340 }
341
// Sort _klasses by class name so that classes are copied into the archive in a
// deterministic order.
void ArchiveBuilder::sort_klasses() {
  log_info(cds)("Sorting classes ... ");
  _klasses->sort(compare_klass_by_name);
}
346
// Estimate the total buffer size needed for the dump, based on the sizes
// gathered so far plus estimates for the shared hashtables. The result is
// aligned up to the core region alignment.
size_t ArchiveBuilder::estimate_archive_size() {
  // size of the symbol table and two dictionaries, plus the RunTimeClassInfo's
  size_t symbol_table_est = SymbolTable::estimate_size_for_archive();
  size_t dictionary_est = SystemDictionaryShared::estimate_size_for_archive();
  size_t training_data_est = TrainingData::estimate_size_for_archive();
  _estimated_hashtable_bytes = symbol_table_est + dictionary_est + training_data_est;

  if (CDSConfig::is_dumping_final_static_archive()) {
    _estimated_hashtable_bytes += 200 * 1024 * 1024; // FIXME -- need to iterate archived symbols??
  }

  if (CDSConfig::is_dumping_dynamic_archive()) {
    // Some extra space for training data. Be generous. Unused areas will be trimmed from the archive file.
    _estimated_hashtable_bytes += 200 * 1024 * 1024;
  }
  size_t total = 0;

  total += _estimated_metaspaceobj_bytes;
  total += _estimated_hashtable_bytes;

  // allow fragmentation at the end of each dump region
  total += _total_dump_regions * MetaspaceShared::core_region_alignment();

  log_info(cds)("_estimated_hashtable_bytes = " SIZE_FORMAT " + " SIZE_FORMAT " = " SIZE_FORMAT,
                symbol_table_est, dictionary_est, _estimated_hashtable_bytes);
  log_info(cds)("_estimated_metaspaceobj_bytes = " SIZE_FORMAT, _estimated_metaspaceobj_bytes);
  log_info(cds)("total estimate bytes = " SIZE_FORMAT, total);

  return align_up(total, MetaspaceShared::core_region_alignment());
}
377
378 address ArchiveBuilder::reserve_buffer() {
379 size_t buffer_size = estimate_archive_size();
380 ReservedSpace rs(buffer_size, MetaspaceShared::core_region_alignment(), os::vm_page_size());
381 if (!rs.is_reserved()) {
455
456 iterate_roots(it);
457 }
458
459 class GatherSortedSourceObjs : public MetaspaceClosure {
460 ArchiveBuilder* _builder;
461
462 public:
463 GatherSortedSourceObjs(ArchiveBuilder* builder) : _builder(builder) {}
464
465 virtual bool do_ref(Ref* ref, bool read_only) {
466 return _builder->gather_one_source_obj(ref, read_only);
467 }
468 };
469
470 bool ArchiveBuilder::gather_one_source_obj(MetaspaceClosure::Ref* ref, bool read_only) {
471 address src_obj = ref->obj();
472 if (src_obj == nullptr) {
473 return false;
474 }
475
476 remember_embedded_pointer_in_enclosing_obj(ref);
477 if (RegeneratedClasses::has_been_regenerated(src_obj)) {
478 // No need to copy it. We will later relocate it to point to the regenerated klass/method.
479 return false;
480 }
481
482 FollowMode follow_mode = get_follow_mode(ref);
483 SourceObjInfo src_info(ref, read_only, follow_mode);
484 bool created;
485 SourceObjInfo* p = _src_obj_table.put_if_absent(src_obj, src_info, &created);
486 if (created) {
487 if (_src_obj_table.maybe_grow()) {
488 log_info(cds, hashtables)("Expanded _src_obj_table table to %d", _src_obj_table.table_size());
489 }
490 }
491
492 #ifdef ASSERT
493 if (ref->msotype() == MetaspaceObj::MethodType) {
494 Method* m = (Method*)ref->obj();
495 assert(!RegeneratedClasses::has_been_regenerated((address)m->method_holder()),
496 "Should not archive methods in a class that has been regenerated");
497 }
498 #endif
499
500 assert(p->read_only() == src_info.read_only(), "must be");
560 _rw_src_objs.remember_embedded_pointer(src_info, ref);
561 }
562 }
563 }
564
565 void ArchiveBuilder::gather_source_objs() {
566 ResourceMark rm;
567 log_info(cds)("Gathering all archivable objects ... ");
568 gather_klasses_and_symbols();
569 GatherSortedSourceObjs doit(this);
570 iterate_sorted_roots(&doit);
571 doit.finish();
572 }
573
574 bool ArchiveBuilder::is_excluded(Klass* klass) {
575 if (klass->is_instance_klass()) {
576 InstanceKlass* ik = InstanceKlass::cast(klass);
577 return SystemDictionaryShared::is_excluded_class(ik);
578 } else if (klass->is_objArray_klass()) {
579 Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
580 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_shared_static(bottom)) {
581 // The bottom class is in the static archive so it's clearly not excluded.
582 assert(CDSConfig::is_dumping_dynamic_archive(), "sanity");
583 return false;
584 } else if (bottom->is_instance_klass()) {
585 return SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(bottom));
586 }
587 }
588
589 return false;
590 }
591
// Decide how the gathering passes should treat the object referenced by ref:
//   point_to_it - keep the pointer as-is (object already lives in the mapped
//                 static archive during a dynamic dump);
//   set_to_null - drop the reference (excluded class, or training data when
//                 training data is not being collected);
//   make_a_copy - copy the object into the dump buffer.
// NOTE: the branch order matters — metadata already in the shared metaspace is
// pointed-to even if it is of a training-data type.
ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
  address obj = ref->obj();
  if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_in_shared_metaspace(obj)) {
    // Don't dump existing shared metadata again.
    return point_to_it;
  } else if (ref->msotype() == MetaspaceObj::MethodDataType ||
             ref->msotype() == MetaspaceObj::MethodCountersType ||
             ref->msotype() == MetaspaceObj::KlassTrainingDataType ||
             ref->msotype() == MetaspaceObj::MethodTrainingDataType ||
             ref->msotype() == MetaspaceObj::CompileTrainingDataType) {
    // Profiling/training data is archived only when training data is wanted.
    return TrainingData::need_data() ? make_a_copy : set_to_null;
  } else {
    if (ref->msotype() == MetaspaceObj::ClassType) {
      Klass* klass = (Klass*)ref->obj();
      assert(klass->is_klass(), "must be");
      if (is_excluded(klass)) {
        ResourceMark rm;
        log_debug(cds, dynamic)("Skipping class (excluded): %s", klass->external_name());
        return set_to_null;
      }
    }

    return make_a_copy;
  }
}
617
618 void ArchiveBuilder::start_dump_region(DumpRegion* next) {
619 address bottom = _last_verified_top;
620 address top = (address)(current_dump_region()->top());
621 _other_region_used_bytes += size_t(top - bottom);
622
755
756 log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(src), p2i(dest), bytes);
757 src_info->set_buffered_addr((address)dest);
758
759 _alloc_stats.record(src_info->msotype(), int(newtop - oldtop), src_info->read_only());
760 }
761
762 // This is used by code that hand-assembles data structures, such as the LambdaProxyClassKey, that are
763 // not handled by MetaspaceClosure.
764 void ArchiveBuilder::write_pointer_in_buffer(address* ptr_location, address src_addr) {
765 assert(is_in_buffer_space(ptr_location), "must be");
766 if (src_addr == nullptr) {
767 *ptr_location = nullptr;
768 ArchivePtrMarker::clear_pointer(ptr_location);
769 } else {
770 *ptr_location = get_buffered_addr(src_addr);
771 ArchivePtrMarker::mark_pointer(ptr_location);
772 }
773 }
774
775 void ArchiveBuilder::mark_and_relocate_to_buffered_addr(address* ptr_location) {
776 assert(*ptr_location != nullptr, "sanity");
777 if (!is_in_mapped_static_archive(*ptr_location)) {
778 *ptr_location = get_buffered_addr(*ptr_location);
779 }
780 ArchivePtrMarker::mark_pointer(ptr_location);
781 }
782
783 address ArchiveBuilder::get_buffered_addr(address src_addr) const {
784 SourceObjInfo* p = _src_obj_table.get(src_addr);
785 assert(p != nullptr, "src_addr " INTPTR_FORMAT " is used but has not been archived",
786 p2i(src_addr));
787
788 return p->buffered_addr();
789 }
790
791 bool ArchiveBuilder::has_been_archived(address src_addr) const {
792 SourceObjInfo* p = _src_obj_table.get(src_addr);
793 return (p != nullptr);
794 }
795
796 address ArchiveBuilder::get_source_addr(address buffered_addr) const {
797 assert(is_in_buffer_space(buffered_addr), "must be");
798 address* src_p = _buffered_to_src_table.get(buffered_addr);
799 assert(src_p != nullptr && *src_p != nullptr, "must be");
800 return *src_p;
801 }
802
803 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
804 for (int i = 0; i < src_objs->objs()->length(); i++) {
805 src_objs->relocate(i, this);
806 }
807 }
808
// Relocate the embedded pointers of all copied objects in the rw and ro
// regions so they point at the buffered copies.
void ArchiveBuilder::relocate_metaspaceobj_embedded_pointers() {
  log_info(cds)("Relocating embedded pointers in core regions ... ");
  relocate_embedded_pointers(&_rw_src_objs);
  relocate_embedded_pointers(&_ro_src_objs);
}
814
// Bump counter x together with its _p ("preloaded") and _i ("inited")
// sub-counters. Relies on local variables named `preloaded` and `inited`
// being in scope at the expansion site (see make_klasses_shareable()).
#define ADD_COUNT(x) \
  x += 1; \
  x ## _p += preloaded; \
  x ## _i += inited;

// Declare counter x plus its _p and _i sub-counters, all initialized to zero.
#define DECLARE_INSTANCE_KLASS_COUNTER(x) \
  int x = 0; \
  int x ## _p = 0; \
  int x ## _i = 0; \

// Strip all gathered classes (working on their buffered copies) of state that
// must not go into the archive, and log per-loader statistics. After this the
// buffered classes are in "unlinked" archived form.
void ArchiveBuilder::make_klasses_shareable() {
  DECLARE_INSTANCE_KLASS_COUNTER(num_instance_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_boot_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_vm_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_platform_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_app_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_hidden_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_unlinked_klasses);
  DECLARE_INSTANCE_KLASS_COUNTER(num_unregistered_klasses);
  int num_obj_array_klasses = 0;
  int num_type_array_klasses = 0;

  int boot_unlinked = 0;
  int platform_unlinked = 0;
  int app_unlinked = 0;
  int unreg_unlinked = 0;

  for (int i = 0; i < klasses()->length(); i++) {
    // Some of the code in ConstantPool::remove_unshareable_info() requires the classes
    // to be in linked state, so it must be called here before the next loop, which returns
    // all classes to unlinked state.
    Klass* k = get_buffered_addr(klasses()->at(i));
    if (k->is_instance_klass()) {
      InstanceKlass::cast(k)->constants()->remove_unshareable_info();
    }
  }

  for (int i = 0; i < klasses()->length(); i++) {
    // Tags accumulated for the per-class log line at the bottom of the loop.
    const char* type;
    const char* unlinked = "";
    const char* hidden = "";
    const char* generated = "";
    const char* preloaded_msg = "";
    const char* inited_msg = "";
    Klass* k = get_buffered_addr(klasses()->at(i));
    k->remove_java_mirror();
    if (k->is_objArray_klass()) {
      // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
      // on their array classes.
      num_obj_array_klasses ++;
      type = "array";
    } else if (k->is_typeArray_klass()) {
      num_type_array_klasses ++;
      type = "array";
      k->remove_unshareable_info();
    } else {
      assert(k->is_instance_klass(), " must be");
      InstanceKlass* ik = InstanceKlass::cast(k);
      // Preload status is recorded on the source class, not the buffered copy.
      InstanceKlass* src_ik = get_source_addr(ik);
      int preloaded = ClassPreloader::is_preloaded_class(src_ik);
      int inited = ik->has_preinitialized_mirror();
      ADD_COUNT(num_instance_klasses);
      if (CDSConfig::is_dumping_dynamic_archive()) {
        // For static dump, class loader types are already set.
        ik->assign_class_loader_type();
      }
      if (ik->is_hidden()) {
        // Classify a hidden class by its defining loader.
        oop loader = k->class_loader();
        if (loader == nullptr) {
          type = "boot";
          ADD_COUNT(num_boot_klasses);
        } else if (loader == SystemDictionary::java_platform_loader()) {
          type = "plat";
          ADD_COUNT(num_platform_klasses);
        } else if (loader == SystemDictionary::java_system_loader()) {
          type = "app";
          ADD_COUNT(num_app_klasses);
        } else {
          type = "bad";
          assert(0, "shouldn't happen");
        }
      } else if (ik->is_shared_boot_class()) {
        type = "boot";
        ADD_COUNT(num_boot_klasses);
      } else if (ik->is_shared_platform_class()) {
        type = "plat";
        ADD_COUNT(num_platform_klasses);
      } else if (ik->is_shared_app_class()) {
        type = "app";
        ADD_COUNT(num_app_klasses);
      } else {
        assert(ik->is_shared_unregistered_class(), "must be");
        type = "unreg";
        ADD_COUNT(num_unregistered_klasses);
      }

      if (ClassPreloader::is_vm_class(src_ik)) {
        ADD_COUNT(num_vm_klasses);
      }

      if (!ik->is_linked()) {
        ADD_COUNT(num_unlinked_klasses);
        unlinked = " unlinked";
        if (ik->is_shared_boot_class()) {
          boot_unlinked ++;
        } else if (ik->is_shared_platform_class()) {
          platform_unlinked ++;
        } else if (ik->is_shared_app_class()) {
          app_unlinked ++;
        } else {
          unreg_unlinked ++;
        }
      }

      if (ik->is_hidden()) {
        ADD_COUNT(num_hidden_klasses);
        hidden = " hidden";
      }

      if (ik->is_generated_shared_class()) {
        generated = " generated";
      }
      if (preloaded) {
        preloaded_msg = " preloaded";
      }
      if (inited) {
        inited_msg = " inited";
      }

      MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread::current(), ik);
      ik->remove_unshareable_info();
    }

    if (log_is_enabled(Debug, cds, class)) {
      ResourceMark rm;
      log_debug(cds, class)("klasses[%5d] = " PTR_FORMAT " %-5s %s%s%s%s%s%s", i,
                            p2i(to_requested(k)), type, k->external_name(),
                            hidden, unlinked, generated, preloaded_msg, inited_msg);
    }
  }

  // Helper macros for the summary log lines below; undefined right after use.
#define STATS_FORMAT    "= %5d, preloaded = %5d, inited = %5d"
#define STATS_PARAMS(x) num_ ## x, num_ ## x ## _p, num_ ## x ## _i

  log_info(cds)("Number of classes %d", num_instance_klasses + num_obj_array_klasses + num_type_array_klasses);
  log_info(cds)("    instance classes   " STATS_FORMAT, STATS_PARAMS(instance_klasses));
  log_info(cds)("      boot             " STATS_FORMAT, STATS_PARAMS(boot_klasses));
  log_info(cds)("        vm             " STATS_FORMAT, STATS_PARAMS(vm_klasses));
  log_info(cds)("      platform         " STATS_FORMAT, STATS_PARAMS(platform_klasses));
  log_info(cds)("      app              " STATS_FORMAT, STATS_PARAMS(app_klasses));
  log_info(cds)("      unregistered     " STATS_FORMAT, STATS_PARAMS(unregistered_klasses));
  log_info(cds)("      (hidden)         " STATS_FORMAT, STATS_PARAMS(hidden_klasses));
  log_info(cds)("      (unlinked)       " STATS_FORMAT ", boot = %d, plat = %d, app = %d, unreg = %d",
                STATS_PARAMS(unlinked_klasses),
                boot_unlinked, platform_unlinked,
                app_unlinked, unreg_unlinked);
  log_info(cds)("    obj array classes  = %5d", num_obj_array_klasses);
  log_info(cds)("    type array classes = %5d", num_type_array_klasses);
  log_info(cds)("               symbols = %5d", _symbols->length());

#undef STATS_FORMAT
#undef STATS_PARAMS

  DynamicArchive::make_array_klasses_shareable();
}
980
// Strip the buffered copies of all training-data objects (KlassTrainingData /
// MethodTrainingData / CompileTrainingData) plus MethodData and MethodCounters
// of state that must not go into the archive.
void ArchiveBuilder::make_training_data_shareable() {
  auto clean_td = [&] (address& src_obj, SourceObjInfo& info) {
    if (!is_in_buffer_space(info.buffered_addr())) {
      // Not copied into the dump buffer; nothing to clean.
      return;
    }

    if (info.msotype() == MetaspaceObj::KlassTrainingDataType ||
        info.msotype() == MetaspaceObj::MethodTrainingDataType ||
        info.msotype() == MetaspaceObj::CompileTrainingDataType) {
      TrainingData* buffered_td = (TrainingData*)info.buffered_addr();
      buffered_td->remove_unshareable_info();
    } else if (info.msotype() == MetaspaceObj::MethodDataType) {
      MethodData* buffered_mdo = (MethodData*)info.buffered_addr();
      buffered_mdo->remove_unshareable_info();
    } else if (info.msotype() == MetaspaceObj::MethodCountersType) {
      MethodCounters* buffered_mc = (MethodCounters*)info.buffered_addr();
      buffered_mc->remove_unshareable_info();
    }
  };
  _src_obj_table.iterate_all(clean_td);
}
1002
// Serialize the headers of the shared tables and related items.
// NOTE(review): the order of these calls presumably must match the
// corresponding deserialization sequence at runtime — keep them in sync.
void ArchiveBuilder::serialize_dynamic_archivable_items(SerializeClosure* soc) {
  SymbolTable::serialize_shared_table_header(soc, false);
  SystemDictionaryShared::serialize_dictionary_headers(soc, false);
  DynamicArchive::serialize_array_klasses(soc);
  ClassPreloader::serialize(soc, false);
  FinalImageRecipes::serialize(soc, false);
  TrainingData::serialize_training_data(soc);
}
1011
1012 uintx ArchiveBuilder::buffer_to_offset(address p) const {
1013 address requested_p = to_requested(p);
1014 assert(requested_p >= _requested_static_archive_bottom, "must be");
1015 return requested_p - _requested_static_archive_bottom;
1016 }
1017
1018 uintx ArchiveBuilder::any_to_offset(address p) const {
1019 if (is_in_mapped_static_archive(p)) {
1020 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
1021 return p - _mapped_static_archive_bottom;
1022 }
1023 if (!is_in_buffer_space(p)) {
1024 // p must be a "source" address
1025 p = get_buffered_addr(p);
1026 }
1027 return buffer_to_offset(p);
1028 }
1029
// Finish (pack) the ro region and begin allocating into the cc region.
void ArchiveBuilder::start_cc_region() {
  ro_region()->pack();
  start_dump_region(&_cc_region);
}
1034
// Finish (pack) the cc region; no further cc allocations after this.
void ArchiveBuilder::end_cc_region() {
  _cc_region.pack();
}
1038
1039 #if INCLUDE_CDS_JAVA_HEAP
// Compute the narrowKlass value that k will have at runtime, i.e. encode the
// class's "requested" address against the requested static archive bottom
// using the precomputed shift.
narrowKlass ArchiveBuilder::get_requested_narrow_klass(Klass* k) {
  assert(CDSConfig::is_dumping_heap(), "sanity");
  k = get_buffered_klass(k);
  Klass* requested_k = to_requested(k);
  address narrow_klass_base = _requested_static_archive_bottom; // runtime encoding base == runtime mapping start
  const int narrow_klass_shift = ArchiveHeapWriter::precomputed_narrow_klass_shift;
  return CompressedKlassPointers::encode_not_null(requested_k, narrow_klass_base, narrow_klass_shift);
}
1048 #endif // INCLUDE_CDS_JAVA_HEAP
1049
1050 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
1051 // so that the archive can be mapped to the "requested" location without runtime relocation.
1052 //
1053 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
1054 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
1055 // - Every pointer must have one of the following values:
1056 // [a] nullptr:
1057 // No relocation is needed. Remove this pointer from ptrmap so we don't need to
1058 // consider it at runtime.
1108 } else {
1109 assert(_builder->is_in_mapped_static_archive(*p), "old pointer must point inside buffer space or mapped static archive");
1110 *p += _mapped_to_requested_static_archive_delta;
1111 assert(_builder->is_in_requested_static_archive(*p), "new pointer must point inside requested archive");
1112 }
1113 }
1114 _max_non_null_offset = offset;
1115 }
1116
1117 return true; // keep iterating
1118 }
1119
1120 void doit() {
1121 ArchivePtrMarker::ptrmap()->iterate(this);
1122 ArchivePtrMarker::compact(_max_non_null_offset);
1123 }
1124 };
1125
1126
// Patch all marked pointers in the dump buffer to hold the values they will
// have at the "requested" mapping address, so the archive can be mapped there
// without runtime relocation (see RelocateBufferToRequested above).
void ArchiveBuilder::relocate_to_requested() {
  if (!ro_region()->is_packed()) {
    ro_region()->pack();
  }

  size_t my_archive_size = buffer_top() - buffer_bottom();

  if (CDSConfig::is_dumping_static_archive()) {
    _requested_static_archive_top = _requested_static_archive_bottom + my_archive_size;
    RelocateBufferToRequested<true> patcher(this);
    patcher.doit();
  } else {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    _requested_dynamic_archive_top = _requested_dynamic_archive_bottom + my_archive_size;
    RelocateBufferToRequested<false> patcher(this);
    patcher.doit();
  }
}
1145
1146 // Write detailed info to a mapfile to analyze contents of the archive.
1147 // static dump:
1148 // java -Xshare:dump -Xlog:cds+map=trace:file=cds.map:none:filesize=0
1149 // dynamic dump:
1150 // java -cp MyApp.jar -XX:ArchiveClassesAtExit=MyApp.jsa \
1258 base = requested_base;
1259 top = requested_base + size;
1260 log_info(cds, map)("[%-18s " PTR_FORMAT " - " PTR_FORMAT " " SIZE_FORMAT_W(9) " bytes]",
1261 name, p2i(base), p2i(top), size);
1262 }
1263
1264 #if INCLUDE_CDS_JAVA_HEAP
// Walk the heap buffer object by object and log each entry (archived oop,
// heap roots segment, filler, or permobj segment) with its requested address,
// a hex dump, and per-object details.
static void log_heap_region(ArchiveHeapInfo* heap_info) {
  MemRegion r = heap_info->buffer_region();
  address start = address(r.start()); // start of the current oop inside the buffer
  address end = address(r.end());
  log_region("heap", start, end, ArchiveHeapWriter::buffered_addr_to_requested_addr(start));

  LogStreamHandle(Info, cds, map) st;

  while (start < end) {
    size_t byte_size;
    oop source_oop = ArchiveHeapWriter::buffered_addr_to_source_obj(start);
    address requested_start = ArchiveHeapWriter::buffered_addr_to_requested_addr(start);
    st.print(PTR_FORMAT ": @@ Object ", p2i(requested_start));
    int permobj_segment = -1;
    int permobj_segment_length = -1;

    // Classify what lives at `start` and determine its size so the walk can
    // advance to the next entry.
    if (source_oop != nullptr) {
      // This is a regular oop that got archived.
      print_oop_with_requested_addr_cr(&st, source_oop, false);
      byte_size = source_oop->size() * BytesPerWord;
    } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
      // HeapShared::roots() is copied specially, so it doesn't exist in
      // ArchiveHeapWriter::BufferOffsetToSourceObjectTable.
      // See ArchiveHeapWriter::copy_roots_to_buffer().
      st.print_cr("HeapShared::roots[%d]", HeapShared::pending_roots()->length());
      byte_size = ArchiveHeapWriter::heap_roots_word_size() * BytesPerWord;
    } else if ((byte_size = ArchiveHeapWriter::get_filler_size_at(start)) > 0) {
      // We have a filler oop, which also does not exist in BufferOffsetToSourceObjectTable.
      st.print_cr("filler " SIZE_FORMAT " bytes", byte_size);
    } else if ((permobj_segment = ArchiveHeapWriter::get_permobj_segment_at(start, &byte_size, &permobj_segment_length)) >= 0) {
      st.print_cr("permobj_%d[%d] %zu bytes", permobj_segment, permobj_segment_length, byte_size);
    } else {
      ShouldNotReachHere();
    }

    address oop_end = start + byte_size;
    log_as_hex(start, oop_end, requested_start, /*is_heap=*/true);

    // Detailed (trace-level) logging for the entry just printed.
    if (source_oop != nullptr) {
      log_oop_details(heap_info, source_oop, /*buffered_addr=*/start);
    } else if (start == ArchiveHeapWriter::buffered_heap_roots_addr()) {
      log_heap_roots();
    } else if (permobj_segment >= 0) {
      log_permobj_segment(permobj_segment, permobj_segment_length);
    }
    start = oop_end;
  }
}
1313
1314 // ArchivedFieldPrinter is used to print the fields of archived objects. We can't
1315 // use _source_obj->print_on(), because we want to print the oop fields
1316 // in _source_obj with their requested addresses using print_oop_with_requested_addr_cr().
1317 class ArchivedFieldPrinter : public FieldClosure {
1318 ArchiveHeapInfo* _heap_info;
1319 outputStream* _st;
1320 oop _source_obj;
1321 address _buffered_addr;
1322 public:
1323 ArchivedFieldPrinter(ArchiveHeapInfo* heap_info, outputStream* st, oop src_obj, address buffered_addr) :
1324 _heap_info(heap_info), _st(st), _source_obj(src_obj), _buffered_addr(buffered_addr) {}
1325
1326 void do_field(fieldDescriptor* fd) {
1327 _st->print(" - ");
1328 BasicType ft = fd->field_type();
1389 print_oop_with_requested_addr_cr(&st, source_obj_array->obj_at(i));
1390 }
1391 } else {
1392 st.print_cr(" - fields (" SIZE_FORMAT " words):", source_oop->size());
1393 ArchivedFieldPrinter print_field(heap_info, &st, source_oop, buffered_addr);
1394 InstanceKlass::cast(source_klass)->print_nonstatic_fields(&print_field);
1395 }
1396 }
1397 }
1398
1399 static void log_heap_roots() {
1400 LogStreamHandle(Trace, cds, map, oops) st;
1401 if (st.is_enabled()) {
1402 for (int i = 0; i < HeapShared::pending_roots()->length(); i++) {
1403 st.print("roots[%4d]: ", i);
1404 print_oop_with_requested_addr_cr(&st, HeapShared::pending_roots()->at(i));
1405 }
1406 }
1407 }
1408
1409 static void log_permobj_segment(int permobj_segment, int permobj_segment_length) {
1410 LogStreamHandle(Trace, cds, map, oops) st;
1411 if (st.is_enabled()) {
1412 for (int i = 0; i < permobj_segment_length; i++) {
1413 st.print("permobj_%d[%4d]: ", permobj_segment, i);
1414 print_oop_with_requested_addr_cr(&st, ArchiveHeapWriter::get_permobj_source_addr(permobj_segment, i));
1415 }
1416 }
1417 }
1418
1419
1420 // The output looks like this. The first number is the requested address. The second number is
1421 // the narrowOop version of the requested address.
1422 // 0x00000007ffc7e840 (0xfff8fd08) java.lang.Class
1423 // 0x00000007ffc000f8 (0xfff8001f) [B length: 11
// The output looks like this. The first number is the requested address. The second number is
// the narrowOop version of the requested address.
//     0x00000007ffc7e840 (0xfff8fd08) java.lang.Class
//     0x00000007ffc000f8 (0xfff8001f) [B length: 11
// Takes the SOURCE oop; its requested address is looked up via ArchiveHeapWriter.
static void print_oop_with_requested_addr_cr(outputStream* st, oop source_oop, bool print_addr = true) {
  if (source_oop == nullptr) {
    st->print_cr("null");
  } else {
    ResourceMark rm;
    oop requested_obj = ArchiveHeapWriter::source_obj_to_requested_obj(source_oop);
    if (print_addr) {
      st->print(PTR_FORMAT " ", p2i(requested_obj));
    }
    if (UseCompressedOops) {
      st->print("(0x%08x) ", CompressedOops::narrow_oop_value(requested_obj));
    }
    if (source_oop->is_array()) {
      int array_len = arrayOop(source_oop)->length();
      st->print_cr("%s length: %d", source_oop->klass()->external_name(), array_len);
    } else {
      st->print("%s", source_oop->klass()->external_name());
      if (java_lang_invoke_MethodType::is_instance(source_oop)) {
        // MethodTypes additionally get their signature printed.
        st->print(" ");
        java_lang_invoke_MethodType::print_signature(source_oop, st);
      }
      st->cr();
    }
  }
}
1449 #endif // INCLUDE_CDS_JAVA_HEAP
1450
1451 // Log all the data [base...top). Pretend that the base address
1452 // will be mapped to requested_base at run-time.
1453 static void log_as_hex(address base, address top, address requested_base, bool is_heap = false) {
1454 assert(top >= base, "must be");
1455
1456 LogStreamHandle(Trace, cds, map) lsh;
1457 if (lsh.is_enabled()) {
1458 int unitsize = sizeof(address);
1459 if (is_heap && UseCompressedOops) {
1460 // This makes the compressed oop pointers easier to read, but
1461 // longs and doubles will be split into two words.
1462 unitsize = sizeof(narrowOop);
1463 }
1464 os::print_hex_dump(&lsh, base, top, unitsize, 32, requested_base);
1465 }
1498 if (heap_info->is_used()) {
1499 log_heap_region(heap_info);
1500 }
1501 #endif
1502
1503 log_info(cds, map)("[End of CDS archive map]");
1504 }
1505 }; // end ArchiveBuilder::CDSMapLogger
1506
// Log per-type allocation statistics for the ro and rw regions.
void ArchiveBuilder::print_stats() {
  _alloc_stats.print_stats(int(_ro_region.used()), int(_rw_region.used()));
}
1510
1511 void ArchiveBuilder::write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info) {
1512 // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
1513 // MetaspaceShared::n_regions (internal to hotspot).
1514 assert(NUM_CDS_REGIONS == MetaspaceShared::n_regions, "sanity");
1515
1516 write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
1517 write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);
1518 write_region(mapinfo, MetaspaceShared::cc, &_cc_region, /*read_only=*/false,/*allow_exec=*/true);
1519
1520 // Split pointer map into read-write and read-only bitmaps
1521 ArchivePtrMarker::initialize_rw_ro_cc_maps(&_rw_ptrmap, &_ro_ptrmap, &_cc_ptrmap);
1522
1523 size_t bitmap_size_in_bytes;
1524 char* bitmap = mapinfo->write_bitmap_region(ArchivePtrMarker::rw_ptrmap(),
1525 ArchivePtrMarker::ro_ptrmap(),
1526 ArchivePtrMarker::cc_ptrmap(),
1527 heap_info,
1528 bitmap_size_in_bytes);
1529
1530 if (heap_info->is_used()) {
1531 _total_heap_region_size = mapinfo->write_heap_region(heap_info);
1532 }
1533
1534 print_region_stats(mapinfo, heap_info);
1535
1536 mapinfo->set_requested_base((char*)MetaspaceShared::requested_base_address());
1537 mapinfo->set_header_crc(mapinfo->compute_header_crc());
1538 // After this point, we should not write any data into mapinfo->header() since this
1539 // would corrupt its checksum we have calculated before.
1540 mapinfo->write_header();
1541 mapinfo->close();
1542
1543 if (log_is_enabled(Info, cds)) {
1544 print_stats();
1545 }
1546
1547 if (log_is_enabled(Info, cds, map)) {
1553 }
1554
1555 void ArchiveBuilder::write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region, bool read_only, bool allow_exec) {
1556 mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
1557 }
1558
// Log a per-region size summary (rw, ro, cc, bitmap, heap) followed by a
// combined "total" line, with each region shown as a percentage of the
// combined reserved size.
void ArchiveBuilder::print_region_stats(FileMapInfo *mapinfo, ArchiveHeapInfo* heap_info) {
  // Print statistics of all the regions
  const size_t bitmap_used = mapinfo->region_at(MetaspaceShared::bm)->used();
  const size_t bitmap_reserved = mapinfo->region_at(MetaspaceShared::bm)->used_aligned();
  // NOTE(review): _cc_region is printed below but its bytes are not included
  // in total_reserved or total_bytes, so the percentages and the "total" line
  // are computed against a total that excludes it — confirm this is intended.
  const size_t total_reserved = _ro_region.reserved() + _rw_region.reserved() +
                                bitmap_reserved +
                                _total_heap_region_size;
  const size_t total_bytes = _ro_region.used() + _rw_region.used() +
                             bitmap_used +
                             _total_heap_region_size;
  const double total_u_perc = percent_of(total_bytes, total_reserved);

  // Each region prints its own line, sized relative to total_reserved.
  _rw_region.print(total_reserved);
  _ro_region.print(total_reserved);
  _cc_region.print(total_reserved);

  print_bitmap_region_stats(bitmap_used, total_reserved);

  // The heap region line is only meaningful when heap objects were archived.
  if (heap_info->is_used()) {
    print_heap_region_stats(heap_info, total_reserved);
  }

  log_debug(cds)("total : " SIZE_FORMAT_W(9) " [100.0%% of total] out of " SIZE_FORMAT_W(9) " bytes [%5.1f%% used]",
                 total_bytes, total_reserved, total_u_perc);
}
1584
1585 void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
1586 log_debug(cds)("bm space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used]",
1587 size, size/double(total_size)*100.0, size);
1588 }
1589
1590 void ArchiveBuilder::print_heap_region_stats(ArchiveHeapInfo *info, size_t total_size) {
1591 char* start = info->buffer_start();
1592 size_t size = info->buffer_byte_size();
1593 char* top = start + size;
1594 log_debug(cds)("hp space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used] at " INTPTR_FORMAT,
1595 size, size/double(total_size)*100.0, size, p2i(start));
1596 }
1597
// Report that the dump region identified by 'name' could not satisfy an
// allocation of 'needed_bytes', then abort the dump via
// MetaspaceShared::unrecoverable_writing_error() (presumably does not
// return — verify against its definition).
void ArchiveBuilder::report_out_of_space(const char* name, size_t needed_bytes) {
  // This is highly unlikely to happen on 64-bits because we have reserved a 4GB space.
  // On 32-bit we reserve only 256MB so you could run out of space with 100,000 classes
  // or so.
  // Print the fill level of both regions regardless of which one overflowed,
  // so the log shows the complete picture.
  _rw_region.print_out_of_space_msg(name, needed_bytes);
  _ro_region.print_out_of_space_msg(name, needed_bytes);

  log_error(cds)("Unable to allocate from '%s' region: Please reduce the number of shared classes.", name);
  MetaspaceShared::unrecoverable_writing_error();
}
|