
src/hotspot/share/gc/shared/space.cpp

*** 27,10 ***
--- 27,11 ---
  #include "classfile/vmSymbols.hpp"
  #include "gc/shared/blockOffsetTable.inline.hpp"
  #include "gc/shared/collectedHeap.inline.hpp"
  #include "gc/shared/genCollectedHeap.hpp"
  #include "gc/shared/genOopClosures.inline.hpp"
+ #include "gc/shared/slidingForwarding.inline.hpp"
  #include "gc/shared/space.hpp"
  #include "gc/shared/space.inline.hpp"
  #include "gc/shared/spaceDecorator.inline.hpp"
  #include "memory/iterator.inline.hpp"
  #include "memory/universe.hpp"

*** 344,10 ***
--- 345,11 ---
  void CompactibleSpace::clear(bool mangle_space) {
    Space::clear(mangle_space);
    _compaction_top = bottom();
  }
  
+ template <bool ALT_FWD>
  HeapWord* CompactibleSpace::forward(oop q, size_t size,
                                      CompactPoint* cp, HeapWord* compact_top) {
    // q is alive
    // First check if we should switch compaction space
    assert(this == cp->space, "'this' should be current compaction space.");

*** 368,17 ***
      compaction_max_size = pointer_delta(cp->space->end(), compact_top);
    }
  
    // store the forwarding pointer into the mark word
    if (cast_from_oop<HeapWord*>(q) != compact_top) {
!     q->forward_to(cast_to_oop(compact_top));
      assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
    } else {
      // if the object isn't moving we can just set the mark to the default
      // mark and handle it specially later on.
      q->init_mark();
!     assert(q->forwardee() == NULL, "should be forwarded to NULL");
    }
  
    compact_top += size;
  
    // we need to update the offset table so that the beginnings of objects can be
--- 370,17 ---
      compaction_max_size = pointer_delta(cp->space->end(), compact_top);
    }
  
    // store the forwarding pointer into the mark word
    if (cast_from_oop<HeapWord*>(q) != compact_top) {
!     SlidingForwarding::forward_to<ALT_FWD>(q, cast_to_oop(compact_top));
      assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
    } else {
      // if the object isn't moving we can just set the mark to the default
      // mark and handle it specially later on.
      q->init_mark();
!     assert(SlidingForwarding::is_not_forwarded(q), "should not be forwarded");
    }
  
    compact_top += size;
  
    // we need to update the offset table so that the beginnings of objects can be
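
Note: the hunk above replaces the direct mark-word forwarding (q->forward_to(...)) with calls into SlidingForwarding, matching the include added at the top of the file; the old assertion that forwardee() is NULL goes away because the forwarding state is no longer read straight off the mark word. The sketch below only restates the two branches to show how the pair of calls is meant to be used; relocate_example is an invented name and the surrounding scan loop is omitted.

    // Sketch (invented helper name) of the store side of sliding forwarding,
    // following the branches in CompactibleSpace::forward() above.
    template <bool ALT_FWD>
    void relocate_example(oop q, HeapWord* compact_top) {
      if (cast_from_oop<HeapWord*>(q) != compact_top) {
        // The object moves: record its destination through the forwarding code
        // selected by ALT_FWD instead of writing the raw pointer into the mark word.
        SlidingForwarding::forward_to<ALT_FWD>(q, cast_to_oop(compact_top));
      } else {
        // The object stays in place: restore a clean header and leave it unforwarded.
        q->init_mark();
        assert(SlidingForwarding::is_not_forwarded(q), "should not be forwarded");
      }
    }
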

*** 391,24 ***
  }
  
  #if INCLUDE_SERIALGC
  
  void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
!   scan_and_forward(this, cp);
  }
  
  void CompactibleSpace::adjust_pointers() {
    // Check first if there is any work to do.
    if (used() == 0) {
      return;   // Nothing to do.
    }
  
!   scan_and_adjust_pointers(this);
  }
  
  void CompactibleSpace::compact() {
!   scan_and_compact(this);
  }
  
  #endif // INCLUDE_SERIALGC
  
  void Space::print_short() const { print_short_on(tty); }
--- 393,36 ---
  }
  
  #if INCLUDE_SERIALGC
  
  void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
!   if (UseAltGCForwarding) {
+     scan_and_forward<true>(this, cp);
+   } else {
+     scan_and_forward<false>(this, cp);
+   }
  }
  
  void CompactibleSpace::adjust_pointers() {
    // Check first if there is any work to do.
    if (used() == 0) {
      return;   // Nothing to do.
    }
  
!   if (UseAltGCForwarding) {
+     scan_and_adjust_pointers<true>(this);
+   } else {
+     scan_and_adjust_pointers<false>(this);
+   }
  }
  
  void CompactibleSpace::compact() {
!   if (UseAltGCForwarding) {
+     scan_and_compact<true>(this);
+   } else {
+     scan_and_compact<false>(this);
+   }
  }
  
  #endif // INCLUDE_SERIALGC
  
  void Space::print_short() const { print_short_on(tty); }
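
Note: each SERIALGC entry point above now tests UseAltGCForwarding once, outside the per-object loops, and instantiates the matching scan_and_* template; presumably this keeps the hot paths free of a per-object flag check at the cost of two instantiations. A minimal sketch of that flag-to-template dispatch, with invented names standing in for scan_and_forward / scan_and_adjust_pointers / scan_and_compact:

    // Hypothetical example of the dispatch pattern used above.
    template <bool ALT_FWD>
    void do_phase_example(CompactibleSpace* space);   // invented name

    void run_phase_example(CompactibleSpace* space) { // invented name
      if (UseAltGCForwarding) {
        do_phase_example<true>(space);   // alternative (sliding) forwarding encoding
      } else {
        do_phase_example<false>(space);  // legacy mark-word forwarding
      }
    }
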

*** 584,26 ***
    if (factor > 0) {
      size -= size/factor;
    }
    size = align_object_size(size);
  
!   const size_t array_header_size = typeArrayOopDesc::header_size(T_INT);
    if (size >= align_object_size(array_header_size)) {
      size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
      // allocate uninitialized int array
      typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
      assert(t != NULL, "allocation should succeed");
!     t->set_mark(markWord::prototype());
!     t->set_klass(Universe::intArrayKlassObj());
      t->set_length((int)length);
    } else {
      assert(size == CollectedHeap::min_fill_size(),
             "size for smallest fake object doesn't match");
      instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
!     obj->set_mark(markWord::prototype());
!     obj->set_klass_gap(0);
!     obj->set_klass(vmClasses::Object_klass());
    }
  }
  
  HeapWord* OffsetTableContigSpace::initialize_threshold() {
    return _offsets.initialize_threshold();
--- 598,34 ---
    if (factor > 0) {
      size -= size/factor;
    }
    size = align_object_size(size);
  
!   const size_t array_header_size = (arrayOopDesc::base_offset_in_bytes(T_INT) + BytesPerWord) / BytesPerWord;
    if (size >= align_object_size(array_header_size)) {
      size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
      // allocate uninitialized int array
      typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
      assert(t != NULL, "allocation should succeed");
!     if (UseCompactObjectHeaders) {
!       t->set_mark(Universe::intArrayKlassObj()->prototype_header());
+     } else {
+       t->set_mark(markWord::prototype());
+       t->set_klass(Universe::intArrayKlassObj());
+     }
      t->set_length((int)length);
    } else {
      assert(size == CollectedHeap::min_fill_size(),
             "size for smallest fake object doesn't match");
      instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
!     if (UseCompactObjectHeaders) {
!       obj->set_mark(vmClasses::Object_klass()->prototype_header());
!     } else {
+       obj->set_mark(markWord::prototype());
+       obj->set_klass_gap(0);
+       obj->set_klass(vmClasses::Object_klass());
+     }
    }
  }
  
  HeapWord* OffsetTableContigSpace::initialize_threshold() {
    return _offsets.initialize_threshold();
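
Note: in the filler-object hunk above, the UseCompactObjectHeaders branches initialize the header from the klass's prototype header, so no separate klass word (or klass gap) is written; the new array_header_size expression likewise derives the header size in words from arrayOopDesc::base_offset_in_bytes(T_INT), which may no longer be word-aligned with compact headers, instead of typeArrayOopDesc::header_size(T_INT). A small sketch of the int-array filler initialization as the two branches read, with an invented helper name:

    // Sketch (invented name) of the filler initialization under both header layouts,
    // following the branches above.
    void fill_with_int_array_example(typeArrayOop t, int length) {
      if (UseCompactObjectHeaders) {
        // Compact headers: the klass's prototype header already encodes the klass,
        // so a single mark-word store initializes the whole header.
        t->set_mark(Universe::intArrayKlassObj()->prototype_header());
      } else {
        // Legacy layout: mark word and klass word are set separately.
        t->set_mark(markWord::prototype());
        t->set_klass(Universe::intArrayKlassObj());
      }
      t->set_length(length);
    }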