< prev index next >

src/hotspot/share/cds/archiveBuilder.cpp

Print this page

  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "cds/archiveBuilder.hpp"
  27 #include "cds/archiveUtils.hpp"
  28 #include "cds/cppVtables.hpp"
  29 #include "cds/dumpAllocStats.hpp"
  30 #include "cds/heapShared.hpp"
  31 #include "cds/metaspaceShared.hpp"
  32 #include "classfile/classLoaderDataShared.hpp"
  33 #include "classfile/symbolTable.hpp"
  34 #include "classfile/systemDictionaryShared.hpp"
  35 #include "classfile/vmClasses.hpp"
  36 #include "interpreter/abstractInterpreter.hpp"
  37 #include "logging/log.hpp"
  38 #include "logging/logStream.hpp"
  39 #include "memory/allStatic.hpp"
  40 #include "memory/memRegion.hpp"
  41 #include "memory/resourceArea.hpp"

  42 #include "oops/instanceKlass.hpp"

  43 #include "oops/objArrayKlass.hpp"
  44 #include "oops/objArrayOop.inline.hpp"
  45 #include "oops/oopHandle.inline.hpp"
  46 #include "runtime/arguments.hpp"
  47 #include "runtime/globals_extension.hpp"
  48 #include "runtime/javaThread.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "utilities/align.hpp"
  51 #include "utilities/bitMap.inline.hpp"
  52 #include "utilities/formatBuffer.hpp"
  53 
  54 ArchiveBuilder* ArchiveBuilder::_current = NULL;
  55 
// RAII mark destructor: charge everything allocated in the RO region since
// construction (i.e. since _oldtop was captured) to the "other" category of
// the dump-time allocation statistics.
  56 ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  57   char* newtop = ArchiveBuilder::current()->_ro_region.top();
  58   ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
  59 }
  60 
  61 ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K) {
  62   _total_bytes = 0;

 201   GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}
 202 
 203   virtual bool do_unique_ref(Ref* ref, bool read_only) {
 204     return _builder->gather_klass_and_symbol(ref, read_only);
 205   }
 206 };
 207 
// MetaspaceClosure callback, invoked once per unique metaspace object.
// Collects the Klasses (unless excluded) and Symbols that will be copied into
// the archive, and accumulates _estimated_metaspaceobj_bytes. Returns true to
// recurse into the object's embedded pointers, false to stop following refs.
 208 bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
 209   if (ref->obj() == NULL) {
 210     return false;
 211   }
 212   if (get_follow_mode(ref) != make_a_copy) {
 213     return false;
 214   }
 215   if (ref->msotype() == MetaspaceObj::ClassType) {
 216     Klass* klass = (Klass*)ref->obj();
 217     assert(klass->is_klass(), "must be");
 218     if (!is_excluded(klass)) {
 219       _klasses->append(klass);
 220     }
 221     // See RunTimeClassInfo::get_for()
 222     _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);


 223   } else if (ref->msotype() == MetaspaceObj::SymbolType) {
 224     // Make sure the symbol won't be GC'ed while we are dumping the archive.
 225     Symbol* sym = (Symbol*)ref->obj();
 226     sym->increment_refcount();
 227     _symbols->append(sym);
 228   }
 229 
       // Every copied object is aligned to SharedSpaceObjectAlignment in the buffer,
       // so the estimate must use the aligned size, not the raw size.
 230   int bytes = ref->size() * BytesPerWord;
 231   _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);
 232 
 233   return true; // recurse
 234 }
 235 
 236 void ArchiveBuilder::gather_klasses_and_symbols() {
 237   ResourceMark rm;
 238   log_info(cds)("Gathering classes and symbols ... ");
 239   GatherKlassesAndSymbols doit(this);
 240   iterate_roots(&doit, /*is_relocating_pointers=*/false);
 241 #if INCLUDE_CDS_JAVA_HEAP
 242   if (is_dumping_full_module_graph()) {

 599 }
 600 
// Copy every gathered source object in src_objs into dump_region, in list
// order, via make_shallow_copy().
 601 void ArchiveBuilder::make_shallow_copies(DumpRegion *dump_region,
 602                                          const ArchiveBuilder::SourceObjList* src_objs) {
 603   for (int i = 0; i < src_objs->objs()->length(); i++) {
 604     make_shallow_copy(dump_region, src_objs->objs()->at(i));
 605   }
 606   log_info(cds)("done (%d objects)", src_objs->objs()->length());
 607 }
 608 
// Allocate space in dump_region and memcpy one source object into it
// ("shallow": embedded pointers are relocated later). Also records the
// buffered->source mapping, patches in the archived C++ vtable pointer for
// types that have one, and updates dump-time allocation statistics.
 609 void ArchiveBuilder::make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info) {
 610   MetaspaceClosure::Ref* ref = src_info->ref();
 611   address src = ref->obj();
 612   int bytes = src_info->size_in_bytes();
 613   char* dest;
 614   char* oldtop;
 615   char* newtop;
 616 
 617   oldtop = dump_region->top();
 618   if (ref->msotype() == MetaspaceObj::ClassType) {
 619     // Save a pointer immediate in front of an InstanceKlass, so
 620     // we can do a quick lookup from InstanceKlass* -> RunTimeClassInfo*
 621     // without building another hashtable. See RunTimeClassInfo::get_for()
 622     // in systemDictionaryShared.cpp.

 623     Klass* klass = (Klass*)src;
 624     if (klass->is_instance_klass()) {
 625       SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
 626       dump_region->allocate(sizeof(address));
 627     }



 628   }
 629   dest = dump_region->allocate(bytes);
 630   newtop = dump_region->top();
 631 
 632   memcpy(dest, src, bytes);
     // Remember dest -> src so get_source_addr() can invert the copy; each
     // source object must be copied exactly once, hence the assert on 'created'.
 633   {
 634     bool created;
 635     _buffered_to_src_table.put_if_absent((address)dest, src, &created);
 636     assert(created, "must be");
 637     if (_buffered_to_src_table.maybe_grow()) {
 638       log_info(cds, hashtables)("Expanded _buffered_to_src_table table to %d", _buffered_to_src_table.table_size());
 639     }
 640   }
 641 
     // For polymorphic metaspace types, overwrite the copied vtable pointer with
     // the archived vtable and mark the slot so it gets relocated at runtime.
 642   intptr_t* archived_vtable = CppVtables::get_archived_vtable(ref->msotype(), (address)dest);
 643   if (archived_vtable != NULL) {
 644     *(address*)dest = (address)archived_vtable;
 645     ArchivePtrMarker::mark_pointer((address*)dest);
 646   }
 647 
 648   log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(src), p2i(dest), bytes);

 649   src_info->set_buffered_addr((address)dest);
 650 
 651   _alloc_stats.record(ref->msotype(), int(newtop - oldtop), src_info->read_only());


 652 }
 653 
// Map the address of a source (dump-time) metaspace object to the address of
// its copy in the output buffer. The object must already have been copied.
 654 address ArchiveBuilder::get_buffered_addr(address src_addr) const {
 655   SourceObjInfo* p = _src_obj_table.get(src_addr);
 656   assert(p != NULL, "must be");
 657 
 658   return p->buffered_addr();
 659 }
 660 
// Inverse of get_buffered_addr(): map an address inside the output buffer back
// to the original source object, using _buffered_to_src_table.
 661 address ArchiveBuilder::get_source_addr(address buffered_addr) const {
 662   assert(is_in_buffer_space(buffered_addr), "must be");
 663   address* src_p = _buffered_to_src_table.get(buffered_addr);
 664   assert(src_p != NULL && *src_p != NULL, "must be");
 665   return *src_p;
 666 }
 667 
 668 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
 669   for (int i = 0; i < src_objs->objs()->length(); i++) {
 670     src_objs->relocate(i, this);
 671   }

 728 }
 729 
 730 void ArchiveBuilder::make_klasses_shareable() {
 731   int num_instance_klasses = 0;
 732   int num_boot_klasses = 0;
 733   int num_platform_klasses = 0;
 734   int num_app_klasses = 0;
 735   int num_hidden_klasses = 0;
 736   int num_unlinked_klasses = 0;
 737   int num_unregistered_klasses = 0;
 738   int num_obj_array_klasses = 0;
 739   int num_type_array_klasses = 0;
 740 
 741   for (int i = 0; i < klasses()->length(); i++) {
 742     const char* type;
 743     const char* unlinked = "";
 744     const char* hidden = "";
 745     const char* generated = "";
 746     Klass* k = klasses()->at(i);
 747     k->remove_java_mirror();







 748     if (k->is_objArray_klass()) {
 749       // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
 750       // on their array classes.
 751       num_obj_array_klasses ++;
 752       type = "array";
 753     } else if (k->is_typeArray_klass()) {
 754       num_type_array_klasses ++;
 755       type = "array";
 756       k->remove_unshareable_info();
 757     } else {
 758       assert(k->is_instance_klass(), " must be");
 759       num_instance_klasses ++;
 760       InstanceKlass* ik = InstanceKlass::cast(k);
 761       if (DynamicDumpSharedSpaces) {
 762         // For static dump, class loader types are already set.
 763         ik->assign_class_loader_type();
 764       }
 765       if (ik->is_shared_boot_class()) {
 766         type = "boot";
 767         num_boot_klasses ++;

 819   address requested_p = to_requested(p);
 820   assert(requested_p >= _requested_static_archive_bottom, "must be");
 821   return requested_p - _requested_static_archive_bottom;
 822 }
 823 
// Convert an address to an archive-relative offset. Addresses inside the
// mapped static archive (possible only during dynamic dump) are measured from
// the static archive bottom; anything else must be in the buffer and is
// delegated to buffer_to_offset().
 824 uintx ArchiveBuilder::any_to_offset(address p) const {
 825   if (is_in_mapped_static_archive(p)) {
 826     assert(DynamicDumpSharedSpaces, "must be");
 827     return p - _mapped_static_archive_bottom;
 828   }
 829   return buffer_to_offset(p);
 830 }
 831 
 832 // Update a Java object to point its Klass* to the address where
 833 // the class would be mapped at runtime.
 834 void ArchiveBuilder::relocate_klass_ptr_of_oop(oop o) {
 835   assert(DumpSharedSpaces, "sanity");
     // Translate the oop's Klass* to its buffered copy, then to the address the
     // class will have when the archive is mapped at the requested location, and
     // store that as the oop's compressed (narrow) klass.
 836   Klass* k = get_buffered_klass(o->klass());
 837   Klass* requested_k = to_requested(k);
 838   narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
 839   o->set_narrow_klass(nk);


 840 }
 841 
 842 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
 843 // so that the archive can be mapped to the "requested" location without runtime relocation.
 844 //
 845 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
 846 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
 847 // - Every pointer must have one of the following values:
 848 //   [a] NULL:
 849 //       No relocation is needed. Remove this pointer from ptrmap so we don't need to
 850 //       consider it at runtime.
 851 //   [b] Points into an object X which is inside the buffer:
 852 //       Adjust this pointer by _buffer_to_requested_delta, so it points to X
 853 //       when the archive is mapped at the requested location.
 854 //   [c] Points into an object Y which is inside mapped static archive:
 855 //       - This happens only during dynamic dump
 856 //       - Adjust this pointer by _mapped_to_requested_static_archive_delta,
 857 //         so it points to Y when the static archive is mapped at the requested location.
 858 template <bool STATIC_DUMP>
 859 class RelocateBufferToRequested : public BitMapClosure {

  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "cds/archiveBuilder.hpp"
  27 #include "cds/archiveUtils.hpp"
  28 #include "cds/cppVtables.hpp"
  29 #include "cds/dumpAllocStats.hpp"
  30 #include "cds/heapShared.hpp"
  31 #include "cds/metaspaceShared.hpp"
  32 #include "classfile/classLoaderDataShared.hpp"
  33 #include "classfile/symbolTable.hpp"
  34 #include "classfile/systemDictionaryShared.hpp"
  35 #include "classfile/vmClasses.hpp"
  36 #include "interpreter/abstractInterpreter.hpp"
  37 #include "logging/log.hpp"
  38 #include "logging/logStream.hpp"
  39 #include "memory/allStatic.hpp"
  40 #include "memory/memRegion.hpp"
  41 #include "memory/resourceArea.hpp"
  42 #include "oops/compressedKlass.inline.hpp"
  43 #include "oops/instanceKlass.hpp"
  44 #include "oops/klass.inline.hpp"
  45 #include "oops/objArrayKlass.hpp"
  46 #include "oops/objArrayOop.inline.hpp"
  47 #include "oops/oopHandle.inline.hpp"
  48 #include "runtime/arguments.hpp"
  49 #include "runtime/globals_extension.hpp"
  50 #include "runtime/javaThread.hpp"
  51 #include "runtime/sharedRuntime.hpp"
  52 #include "utilities/align.hpp"
  53 #include "utilities/bitMap.inline.hpp"
  54 #include "utilities/formatBuffer.hpp"
  55 
  56 ArchiveBuilder* ArchiveBuilder::_current = NULL;
  57 
// RAII mark destructor: charge everything allocated in the RO region since
// construction (i.e. since _oldtop was captured) to the "other" category of
// the dump-time allocation statistics.
  58 ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  59   char* newtop = ArchiveBuilder::current()->_ro_region.top();
  60   ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
  61 }
  62 
  63 ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K) {
  64   _total_bytes = 0;

 203   GatherKlassesAndSymbols(ArchiveBuilder* builder) : _builder(builder) {}
 204 
 205   virtual bool do_unique_ref(Ref* ref, bool read_only) {
 206     return _builder->gather_klass_and_symbol(ref, read_only);
 207   }
 208 };
 209 
// MetaspaceClosure callback, invoked once per unique metaspace object.
// Collects the Klasses (unless excluded) and Symbols that will be copied into
// the archive, and accumulates _estimated_metaspaceobj_bytes. For Klasses the
// estimate also covers the stricter KlassAlignmentInBytes alignment plus the
// RunTimeClassInfo* slot embedded in front of each archived Klass. Returns
// true to recurse into the object's embedded pointers.
 210 bool ArchiveBuilder::gather_klass_and_symbol(MetaspaceClosure::Ref* ref, bool read_only) {
 211   if (ref->obj() == NULL) {
 212     return false;
 213   }
 214   if (get_follow_mode(ref) != make_a_copy) {
 215     return false;
 216   }
 217   if (ref->msotype() == MetaspaceObj::ClassType) {
 218     Klass* klass = (Klass*)ref->obj();
 219     assert(klass->is_klass(), "must be");
 220     if (!is_excluded(klass)) {
 221       _klasses->append(klass);
 222     }
 223     // See ArchiveBuilder::make_shallow_copies: make sure we have enough space for both maximum
 224     // Klass alignment as well as the RuntimeInfo* pointer we will embed in front of a Klass.
 225     _estimated_metaspaceobj_bytes += align_up(BytesPerWord, KlassAlignmentInBytes) +
 226         align_up(sizeof(void*), SharedSpaceObjectAlignment);
 227   } else if (ref->msotype() == MetaspaceObj::SymbolType) {
 228     // Make sure the symbol won't be GC'ed while we are dumping the archive.
 229     Symbol* sym = (Symbol*)ref->obj();
 230     sym->increment_refcount();
 231     _symbols->append(sym);
 232   }
 233 
       // Every copied object is aligned to SharedSpaceObjectAlignment in the buffer,
       // so the estimate must use the aligned size, not the raw size.
 234   int bytes = ref->size() * BytesPerWord;
 235   _estimated_metaspaceobj_bytes += align_up(bytes, SharedSpaceObjectAlignment);
 236 
 237   return true; // recurse
 238 }
 239 
 240 void ArchiveBuilder::gather_klasses_and_symbols() {
 241   ResourceMark rm;
 242   log_info(cds)("Gathering classes and symbols ... ");
 243   GatherKlassesAndSymbols doit(this);
 244   iterate_roots(&doit, /*is_relocating_pointers=*/false);
 245 #if INCLUDE_CDS_JAVA_HEAP
 246   if (is_dumping_full_module_graph()) {

 603 }
 604 
// Copy every gathered source object in src_objs into dump_region, in list
// order, via make_shallow_copy().
 605 void ArchiveBuilder::make_shallow_copies(DumpRegion *dump_region,
 606                                          const ArchiveBuilder::SourceObjList* src_objs) {
 607   for (int i = 0; i < src_objs->objs()->length(); i++) {
 608     make_shallow_copy(dump_region, src_objs->objs()->at(i));
 609   }
 610   log_info(cds)("done (%d objects)", src_objs->objs()->length());
 611 }
 612 
// Allocate space in dump_region and memcpy one source object into it
// ("shallow": embedded pointers are relocated later). Klass copies get the
// stricter KlassAlignmentInBytes alignment. Also records the
// buffered->source mapping, patches in the archived C++ vtable pointer for
// types that have one, and updates dump-time allocation statistics.
 613 void ArchiveBuilder::make_shallow_copy(DumpRegion *dump_region, SourceObjInfo* src_info) {
 614   MetaspaceClosure::Ref* ref = src_info->ref();
 615   address src = ref->obj();
 616   int bytes = src_info->size_in_bytes();
 617   char* dest;
 618   char* oldtop;
 619   char* newtop;
 620 
 621   oldtop = dump_region->top();
 622   if (ref->msotype() == MetaspaceObj::ClassType) {
 623     // Reserve space for a pointer immediately in front of an InstanceKlass. That space will
 624     // later be used to store the RuntimeClassInfo* pointer directly in front of the archived
 625     // InstanceKlass, in order to have a quick lookup InstanceKlass* -> RunTimeClassInfo*
 626     // without building another hashtable. See RunTimeClassInfo::get_for()/::set_for() for
 627     // details.
 628     Klass* klass = (Klass*)src;
 629     if (klass->is_instance_klass()) {
 630       SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
 631       dump_region->allocate(sizeof(address));
 632     }
 633     dest = dump_region->allocate(bytes, KlassAlignmentInBytes);
 634   } else {
 635     dest = dump_region->allocate(bytes);
 636   }

 637   newtop = dump_region->top();
 638 
 639   memcpy(dest, src, bytes);
     // Remember dest -> src so get_source_addr() can invert the copy; each
     // source object must be copied exactly once, hence the assert on 'created'.
 640   {
 641     bool created;
 642     _buffered_to_src_table.put_if_absent((address)dest, src, &created);
 643     assert(created, "must be");
 644     if (_buffered_to_src_table.maybe_grow()) {
 645       log_info(cds, hashtables)("Expanded _buffered_to_src_table table to %d", _buffered_to_src_table.table_size());
 646     }
 647   }
 648 
     // For polymorphic metaspace types, overwrite the copied vtable pointer with
     // the archived vtable and mark the slot so it gets relocated at runtime.
 649   intptr_t* archived_vtable = CppVtables::get_archived_vtable(ref->msotype(), (address)dest);
 650   if (archived_vtable != NULL) {
 651     *(address*)dest = (address)archived_vtable;
 652     ArchivePtrMarker::mark_pointer((address*)dest);
 653   }
 654 
 655   log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d (%s)", p2i(src), p2i(dest), bytes,
 656                  MetaspaceObj::type_name(ref->msotype()));
 657   src_info->set_buffered_addr((address)dest);
 658 
 659   _alloc_stats.record(ref->msotype(), int(newtop - oldtop), src_info->read_only());
 660 
     // Debug-only cross-check: recorded stats must match the region's used size.
 661   DEBUG_ONLY(_alloc_stats.verify((int)dump_region->used(), src_info->read_only()));
 662 }
 663 
// Map the address of a source (dump-time) metaspace object to the address of
// its copy in the output buffer. The object must already have been copied.
 664 address ArchiveBuilder::get_buffered_addr(address src_addr) const {
 665   SourceObjInfo* p = _src_obj_table.get(src_addr);
 666   assert(p != NULL, "must be");
 667 
 668   return p->buffered_addr();
 669 }
 670 
// Inverse of get_buffered_addr(): map an address inside the output buffer back
// to the original source object, using _buffered_to_src_table.
 671 address ArchiveBuilder::get_source_addr(address buffered_addr) const {
 672   assert(is_in_buffer_space(buffered_addr), "must be");
 673   address* src_p = _buffered_to_src_table.get(buffered_addr);
 674   assert(src_p != NULL && *src_p != NULL, "must be");
 675   return *src_p;
 676 }
 677 
 678 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
 679   for (int i = 0; i < src_objs->objs()->length(); i++) {
 680     src_objs->relocate(i, this);
 681   }

 738 }
 739 
 740 void ArchiveBuilder::make_klasses_shareable() {
 741   int num_instance_klasses = 0;
 742   int num_boot_klasses = 0;
 743   int num_platform_klasses = 0;
 744   int num_app_klasses = 0;
 745   int num_hidden_klasses = 0;
 746   int num_unlinked_klasses = 0;
 747   int num_unregistered_klasses = 0;
 748   int num_obj_array_klasses = 0;
 749   int num_type_array_klasses = 0;
 750 
 751   for (int i = 0; i < klasses()->length(); i++) {
 752     const char* type;
 753     const char* unlinked = "";
 754     const char* hidden = "";
 755     const char* generated = "";
 756     Klass* k = klasses()->at(i);
 757     k->remove_java_mirror();
 758     Klass* requested_k = to_requested(k);
 759 #ifdef _LP64
 760     narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
 761     k->set_prototype_header(markWord::prototype().set_narrow_klass(nk));
 762 #else
 763     k->set_prototype_header(markWord::prototype());
 764 #endif
 765     if (k->is_objArray_klass()) {
 766       // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
 767       // on their array classes.
 768       num_obj_array_klasses ++;
 769       type = "array";
 770     } else if (k->is_typeArray_klass()) {
 771       num_type_array_klasses ++;
 772       type = "array";
 773       k->remove_unshareable_info();
 774     } else {
 775       assert(k->is_instance_klass(), " must be");
 776       num_instance_klasses ++;
 777       InstanceKlass* ik = InstanceKlass::cast(k);
 778       if (DynamicDumpSharedSpaces) {
 780         // For static dump, class loader types are already set.
 780         ik->assign_class_loader_type();
 781       }
 782       if (ik->is_shared_boot_class()) {
 783         type = "boot";
 784         num_boot_klasses ++;

 836   address requested_p = to_requested(p);
 837   assert(requested_p >= _requested_static_archive_bottom, "must be");
 838   return requested_p - _requested_static_archive_bottom;
 839 }
 840 
// Convert an address to an archive-relative offset. Addresses inside the
// mapped static archive (possible only during dynamic dump) are measured from
// the static archive bottom; anything else must be in the buffer and is
// delegated to buffer_to_offset().
 841 uintx ArchiveBuilder::any_to_offset(address p) const {
 842   if (is_in_mapped_static_archive(p)) {
 843     assert(DynamicDumpSharedSpaces, "must be");
 844     return p - _mapped_static_archive_bottom;
 845   }
 846   return buffer_to_offset(p);
 847 }
 848 
 849 // Update a Java object to point its Klass* to the address where
 850 // the class would be mapped at runtime.
 851 void ArchiveBuilder::relocate_klass_ptr_of_oop(oop o) {
 852   assert(DumpSharedSpaces, "sanity");
     // Translate the oop's Klass* to its buffered copy, then to the address the
     // class will have when the archive is mapped at the requested location, and
     // encode that as a narrow klass stored in the oop's mark word (LP64 only).
 853   Klass* k = get_buffered_klass(o->klass());
 854   Klass* requested_k = to_requested(k);
 855   narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
     // NOTE(review): on !_LP64 builds 'nk' is computed but unused here —
     // presumably harmless, but confirm this does not trigger an
     // unused-variable warning on 32-bit builds.
 856 #ifdef _LP64
 857   o->set_mark(o->mark().set_narrow_klass(nk));
 858 #endif
 859 }
 860 
 861 // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
 862 // so that the archive can be mapped to the "requested" location without runtime relocation.
 863 //
 864 // - See ArchiveBuilder header for the definition of "buffer", "mapped" and "requested"
 865 // - ArchivePtrMarker::ptrmap() marks all the pointers in the rw/ro regions
 866 // - Every pointer must have one of the following values:
 867 //   [a] NULL:
 868 //       No relocation is needed. Remove this pointer from ptrmap so we don't need to
 869 //       consider it at runtime.
 870 //   [b] Points into an object X which is inside the buffer:
 871 //       Adjust this pointer by _buffer_to_requested_delta, so it points to X
 872 //       when the archive is mapped at the requested location.
 873 //   [c] Points into an object Y which is inside mapped static archive:
 874 //       - This happens only during dynamic dump
 875 //       - Adjust this pointer by _mapped_to_requested_static_archive_delta,
 876 //         so it points to Y when the static archive is mapped at the requested location.
 877 template <bool STATIC_DUMP>
 878 class RelocateBufferToRequested : public BitMapClosure {
< prev index next >