src/hotspot/share/gc/shared/space.cpp

@@ -27,10 +27,11 @@
  #include "classfile/vmSymbols.hpp"
  #include "gc/shared/blockOffsetTable.inline.hpp"
  #include "gc/shared/collectedHeap.inline.hpp"
  #include "gc/shared/genCollectedHeap.hpp"
  #include "gc/shared/genOopClosures.inline.hpp"
+ #include "gc/shared/slidingForwarding.inline.hpp"
  #include "gc/shared/space.hpp"
  #include "gc/shared/space.inline.hpp"
  #include "gc/shared/spaceDecorator.inline.hpp"
  #include "memory/iterator.inline.hpp"
  #include "memory/universe.hpp"

@@ -345,11 +346,11 @@
    Space::clear(mangle_space);
    _compaction_top = bottom();
  }
  
  HeapWord* CompactibleSpace::forward(oop q, size_t size,
-                                     CompactPoint* cp, HeapWord* compact_top) {
+                                     CompactPoint* cp, HeapWord* compact_top, SlidingForwarding* const forwarding) {
    // q is alive
    // First check if we should switch compaction space
    assert(this == cp->space, "'this' should be current compaction space.");
    size_t compaction_max_size = pointer_delta(end(), compact_top);
    while (size > compaction_max_size) {

@@ -368,17 +369,17 @@
      compaction_max_size = pointer_delta(cp->space->end(), compact_top);
    }
  
    // store the forwarding pointer into the mark word
    if (cast_from_oop<HeapWord*>(q) != compact_top) {
-     q->forward_to(cast_to_oop(compact_top));
+     forwarding->forward_to(q, cast_to_oop(compact_top));
      assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
    } else {
      // if the object isn't moving we can just set the mark to the default
      // mark and handle it specially later on.
      q->init_mark();
-     assert(q->forwardee() == NULL, "should be forwarded to NULL");
+     assert(!q->is_forwarded(), "should not be forwarded");
    }
  
    compact_top += size;
  
    // We need to update the offset table so that the beginnings of objects can be

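The reworked forward() above records an object's new location through the SlidingForwarding table that is now passed in, rather than writing a full forwardee pointer into the mark word via q->forward_to(). As a rough, self-contained sketch of the general idea (this is not the HotSpot SlidingForwarding implementation; the names, region count, and bit layout are assumptions), a sliding scheme can encode a forwardee as a small region index plus a word offset, so only a few header bits are consumed:

    #include <cstddef>
    #include <cstdint>

    // Toy sketch: encode a forwardee as (region index, word offset) so the
    // encoding fits in a handful of bits instead of a full pointer.
    struct ToyForwarding {
      static constexpr unsigned kRegionBits = 1;    // e.g. two candidate target regions
      uintptr_t* region_base[1u << kRegionBits];    // base address of each region

      uintptr_t encode(unsigned region, size_t word_offset) const {
        return (uintptr_t(word_offset) << kRegionBits) | region;
      }
      uintptr_t* decode(uintptr_t encoded) const {
        unsigned region = unsigned(encoded & ((1u << kRegionBits) - 1u));
        size_t offset   = size_t(encoded >> kRegionBits);
        return region_base[region] + offset;
      }
    };

The forward_to()/forwardee() calls in this change hide whatever encoding is actually used behind the SlidingForwarding object that is now threaded through the compaction code.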
@@ -416,16 +417,17 @@
    const intx interval = PrefetchScanIntervalInBytes;
  
    HeapWord* cur_obj = bottom();
    HeapWord* scan_limit = top();
  
+   SlidingForwarding* const forwarding = GenCollectedHeap::heap()->forwarding();
    while (cur_obj < scan_limit) {
      if (cast_to_oop(cur_obj)->is_gc_marked()) {
        // prefetch beyond cur_obj
        Prefetch::write(cur_obj, interval);
        size_t size = cast_to_oop(cur_obj)->size();
-       compact_top = cp->space->forward(cast_to_oop(cur_obj), size, cp, compact_top);
+       compact_top = cp->space->forward(cast_to_oop(cur_obj), size, cp, compact_top, forwarding);
        cur_obj += size;
        end_of_live = cur_obj;
      } else {
        // run over all the contiguous dead objects
        HeapWord* end = cur_obj;

@@ -437,11 +439,11 @@
  
        // see if we might want to pretend this object is alive so that
        // we don't have to compact quite as often.
        if (cur_obj == compact_top && dead_spacer.insert_deadspace(cur_obj, end)) {
          oop obj = cast_to_oop(cur_obj);
-         compact_top = cp->space->forward(obj, obj->size(), cp, compact_top);
+         compact_top = cp->space->forward(obj, obj->size(), cp, compact_top, forwarding);
          end_of_live = end;
        } else {
          // otherwise, it really is a free region.
  
          // cur_obj is a pointer to a dead object. Use this dead memory to store a pointer to the next live object.

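The dead-space handling above reuses the first word of a dead gap to store a pointer to the next live object, so later passes can skip the whole gap in one step. A minimal stand-alone sketch of that bookkeeping (toy helpers, not HotSpot code):

    #include <cstring>

    // Toy sketch: stash the address of the next live object in the first word
    // of a dead region, and read it back when walking the space later.
    inline void record_skip(void* dead_start, void* next_live) {
      std::memcpy(dead_start, &next_live, sizeof(next_live));
    }
    inline void* read_skip(const void* dead_start) {
      void* next_live;
      std::memcpy(&next_live, dead_start, sizeof(next_live));
      return next_live;
    }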
@@ -480,10 +482,11 @@
    // Used by MarkSweep::mark_sweep_phase3()
  
    HeapWord* cur_obj = bottom();
    HeapWord* const end_of_live = _end_of_live;  // Established by prepare_for_compaction().
    HeapWord* const first_dead = _first_dead;    // Established by prepare_for_compaction().
+   const SlidingForwarding* const forwarding = GenCollectedHeap::heap()->forwarding();
  
    assert(first_dead <= end_of_live, "Stands to reason, no?");
  
    const intx interval = PrefetchScanIntervalInBytes;
  

@@ -491,11 +494,11 @@
    while (cur_obj < end_of_live) {
      Prefetch::write(cur_obj, interval);
      if (cur_obj < first_dead || cast_to_oop(cur_obj)->is_gc_marked()) {
        // cur_obj is alive
        // point all the oops to the new location
-       size_t size = MarkSweep::adjust_pointers(cast_to_oop(cur_obj));
+       size_t size = MarkSweep::adjust_pointers(forwarding, cast_to_oop(cur_obj));
        debug_only(prev_obj = cur_obj);
        cur_obj += size;
      } else {
        debug_only(prev_obj = cur_obj);
        // cur_obj is not a live object, instead it points at the next live object

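With the forwarding table passed into MarkSweep::adjust_pointers(), the adjustment phase resolves each reference's new address through a table lookup instead of reading a forwardee out of the mark word. A simplified, self-contained sketch of such an adjustment step (a plain map stands in for the SlidingForwarding lookup; the names are assumptions):

    #include <unordered_map>

    // Toy sketch: rewrite a reference field to the new location recorded for
    // the object it points at; fields whose targets did not move are left alone.
    using ToyForwardingMap = std::unordered_map<void*, void*>;

    void adjust_field(const ToyForwardingMap& forwarding, void** field) {
      if (*field == nullptr) return;
      auto it = forwarding.find(*field);
      if (it != forwarding.end()) {
        *field = it->second;
      }
    }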
@@ -532,10 +535,12 @@
    // All objects before _first_dead can be skipped. They should not be moved.
      // A pointer to the first live object is stored at the memory location for _first_dead.
      cur_obj = *(HeapWord**)(_first_dead);
    }
  
+   const SlidingForwarding* const forwarding = GenCollectedHeap::heap()->forwarding();
+ 
    debug_only(HeapWord* prev_obj = NULL);
    while (cur_obj < end_of_live) {
      if (!cast_to_oop(cur_obj)->is_gc_marked()) {
        debug_only(prev_obj = cur_obj);
        // The first word of the dead object contains a pointer to the next live object or end of space.

@@ -545,11 +550,11 @@
        // prefetch beyond q
        Prefetch::read(cur_obj, scan_interval);
  
        // size and destination
        size_t size = cast_to_oop(cur_obj)->size();
-       HeapWord* compaction_top = cast_from_oop<HeapWord*>(cast_to_oop(cur_obj)->forwardee());
+       HeapWord* compaction_top = cast_from_oop<HeapWord*>(forwarding->forwardee(cast_to_oop(cur_obj)));
  
        // prefetch beyond compaction_top
        Prefetch::write(compaction_top, copy_interval);
  
        // copy object and reinit its mark

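In compact() the destination is now looked up with forwarding->forwardee(...) before the object is copied. Under the same toy model (assumed names, not HotSpot code), the copy step of a sliding compaction reduces to:

    #include <cstring>
    #include <unordered_map>

    // Toy sketch: slide each live object to the address recorded for it during
    // the forwarding pass. Processing objects in address order keeps sources
    // intact, and memmove tolerates overlap within a single copy.
    void toy_compact(const std::unordered_map<char*, char*>& forwarding,
                     char* const* live_objs, const size_t* sizes_in_bytes, size_t n) {
      for (size_t i = 0; i < n; i++) {
        char* from = live_objs[i];
        char* to   = forwarding.at(from);
        if (to != from) {
          std::memmove(to, from, sizes_in_bytes[i]);
        }
      }
    }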
@@ -747,18 +752,18 @@
    if (size >= align_object_size(array_header_size)) {
      size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
      // allocate uninitialized int array
      typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
      assert(t != NULL, "allocation should succeed");
-     t->set_mark(markWord::prototype());
+     t->set_mark(Universe::intArrayKlassObj()->prototype_header());
      t->set_klass(Universe::intArrayKlassObj());
      t->set_length((int)length);
    } else {
      assert(size == CollectedHeap::min_fill_size(),
             "size for smallest fake object doesn't match");
      instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
-     obj->set_mark(markWord::prototype());
+     obj->set_mark(vmClasses::Object_klass()->prototype_header());
      obj->set_klass_gap(0);
      obj->set_klass(vmClasses::Object_klass());
    }
  }
  
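The last hunk initializes filler objects with the klass's prototype_header() rather than the global markWord::prototype(), so the freshly set mark word can already carry klass-specific bits. A self-contained sketch of the per-klass prototype idea (the bit layout and names below are assumptions, not the actual mark word format):

    #include <cstdint>

    // Toy sketch: a per-klass prototype header that bakes a small klass id into
    // the upper bits, so installing the mark also establishes klass identity.
    struct ToyKlass {
      uint32_t klass_id;
      uint64_t prototype_header() const {
        const uint64_t kUnlockedPattern = 0x1;   // assumed neutral low bits
        return (uint64_t(klass_id) << 32) | kUnlockedPattern;
      }
    };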