< prev index next >

src/hotspot/share/cds/archiveBuilder.cpp

Print this page
*** 37,11 ***
--- 37,13 ---
  #include "logging/log.hpp"
  #include "logging/logStream.hpp"
  #include "memory/allStatic.hpp"
  #include "memory/memRegion.hpp"
  #include "memory/resourceArea.hpp"
+ #include "oops/compressedKlass.inline.hpp"
  #include "oops/instanceKlass.hpp"
+ #include "oops/klass.inline.hpp"
  #include "oops/objArrayKlass.hpp"
  #include "oops/objArrayOop.inline.hpp"
  #include "oops/oopHandle.inline.hpp"
  #include "runtime/arguments.hpp"
  #include "runtime/globals_extension.hpp"

*** 216,12 ***
      Klass* klass = (Klass*)ref->obj();
      assert(klass->is_klass(), "must be");
      if (!is_excluded(klass)) {
        _klasses->append(klass);
      }
!     // See RunTimeClassInfo::get_for()
!     _estimated_metaspaceobj_bytes += align_up(BytesPerWord, SharedSpaceObjectAlignment);
    } else if (ref->msotype() == MetaspaceObj::SymbolType) {
      // Make sure the symbol won't be GC'ed while we are dumping the archive.
      Symbol* sym = (Symbol*)ref->obj();
      sym->increment_refcount();
      _symbols->append(sym);
--- 218,14 ---
      Klass* klass = (Klass*)ref->obj();
      assert(klass->is_klass(), "must be");
      if (!is_excluded(klass)) {
        _klasses->append(klass);
      }
!     // See ArchiveBuilder::make_shallow_copies: make sure we have enough space for both maximum
!     // Klass alignment and the RunTimeClassInfo* pointer we will embed in front of a Klass.
+     _estimated_metaspaceobj_bytes += align_up(BytesPerWord, KlassAlignmentInBytes) +
+         align_up(sizeof(void*), SharedSpaceObjectAlignment);
    } else if (ref->msotype() == MetaspaceObj::SymbolType) {
      // Make sure the symbol won't be GC'ed while we are dumping the archive.
      Symbol* sym = (Symbol*)ref->obj();
      sym->increment_refcount();
      _symbols->append(sym);

*** 614,35 ***
    char* oldtop;
    char* newtop;
  
    oldtop = dump_region->top();
    if (ref->msotype() == MetaspaceObj::ClassType) {
!     // Save a pointer immediate in front of an InstanceKlass, so
!     // we can do a quick lookup from InstanceKlass* -> RunTimeClassInfo*
!     // without building another hashtable. See RunTimeClassInfo::get_for()
!     // in systemDictionaryShared.cpp.
      Klass* klass = (Klass*)src;
      if (klass->is_instance_klass()) {
        SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
        dump_region->allocate(sizeof(address));
      }
    }
-   dest = dump_region->allocate(bytes);
    newtop = dump_region->top();
  
    memcpy(dest, src, bytes);
  
    intptr_t* archived_vtable = CppVtables::get_archived_vtable(ref->msotype(), (address)dest);
    if (archived_vtable != NULL) {
      *(address*)dest = (address)archived_vtable;
      ArchivePtrMarker::mark_pointer((address*)dest);
    }
  
!   log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d", p2i(src), p2i(dest), bytes);
    src_info->set_dumped_addr((address)dest);
  
    _alloc_stats.record(ref->msotype(), int(newtop - oldtop), src_info->read_only());
  }
  
  address ArchiveBuilder::get_dumped_addr(address src_obj) const {
    SourceObjInfo* p = _src_obj_table.get(src_obj);
    assert(p != NULL, "must be");
--- 618,41 ---
    char* oldtop;
    char* newtop;
  
    oldtop = dump_region->top();
    if (ref->msotype() == MetaspaceObj::ClassType) {
!     // Reserve space for a pointer immediately in front of an InstanceKlass. That space will
!     // later be used to store the RunTimeClassInfo* pointer directly in front of the archived
!     // InstanceKlass, in order to have a quick lookup from InstanceKlass* -> RunTimeClassInfo*
!     // without building another hashtable. See RunTimeClassInfo::get_for()/::set_for() for
+     // details.
      Klass* klass = (Klass*)src;
      if (klass->is_instance_klass()) {
        SystemDictionaryShared::validate_before_archiving(InstanceKlass::cast(klass));
        dump_region->allocate(sizeof(address));
      }
+     dest = dump_region->allocate(bytes, KlassAlignmentInBytes);
+   } else {
+     dest = dump_region->allocate(bytes);
    }
    newtop = dump_region->top();
  
    memcpy(dest, src, bytes);
  
    intptr_t* archived_vtable = CppVtables::get_archived_vtable(ref->msotype(), (address)dest);
    if (archived_vtable != NULL) {
      *(address*)dest = (address)archived_vtable;
      ArchivePtrMarker::mark_pointer((address*)dest);
    }
  
!   log_trace(cds)("Copy: " PTR_FORMAT " ==> " PTR_FORMAT " %d (%s)", p2i(src), p2i(dest), bytes,
+                  MetaspaceObj::type_name(ref->msotype()));
    src_info->set_dumped_addr((address)dest);
  
    _alloc_stats.record(ref->msotype(), int(newtop - oldtop), src_info->read_only());
+ 
+   DEBUG_ONLY(_alloc_stats.verify((int)dump_region->used(), src_info->read_only()));
  }
  
  address ArchiveBuilder::get_dumped_addr(address src_obj) const {
    SourceObjInfo* p = _src_obj_table.get(src_obj);
    assert(p != NULL, "must be");

*** 728,10 ***
--- 738,17 ---
      const char* unlinked = "";
      const char* hidden = "";
      const char* generated = "";
      Klass* k = klasses()->at(i);
      k->remove_java_mirror();
+     Klass* requested_k = to_requested(k);
+ #ifdef _LP64
+     narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
+     k->set_prototype_header(markWord::prototype().set_narrow_klass(nk));
+ #else
+     k->set_prototype_header(markWord::prototype());
+ #endif
      if (k->is_objArray_klass()) {
        // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info
        // on their array classes.
        num_obj_array_klasses ++;
        type = "array";

*** 819,11 ***
  void ArchiveBuilder::relocate_klass_ptr(oop o) {
    assert(DumpSharedSpaces, "sanity");
    Klass* k = get_relocated_klass(o->klass());
    Klass* requested_k = to_requested(k);
    narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
!   o->set_narrow_klass(nk);
  }
  
  // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
  // so that the archive can be mapped to the "requested" location without runtime relocation.
  //
--- 836,13 ---
  void ArchiveBuilder::relocate_klass_ptr(oop o) {
    assert(DumpSharedSpaces, "sanity");
    Klass* k = get_relocated_klass(o->klass());
    Klass* requested_k = to_requested(k);
    narrowKlass nk = CompressedKlassPointers::encode_not_null(requested_k, _requested_static_archive_bottom);
! #ifdef _LP64
+   o->set_mark(o->mark().set_narrow_klass(nk));
+ #endif
  }
  
  // RelocateBufferToRequested --- Relocate all the pointers in rw/ro,
  // so that the archive can be mapped to the "requested" location without runtime relocation.
  //
< prev index next >