 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "gc/shared/blockOffsetTable.inline.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/genCollectedHeap.hpp"
#include "gc/shared/genOopClosures.inline.hpp"
#include "gc/shared/space.hpp"
#include "gc/shared/space.inline.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/java.hpp"
#include "runtime/prefetch.inline.hpp"
#include "runtime/safepoint.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_SERIALGC
#include "gc/serial/defNewGeneration.hpp"
#endif

HeapWord* DirtyCardToOopClosure::get_actual_top(HeapWord* top,
                                                HeapWord* top_obj) {

// ... (intervening lines elided) ...

}
void ContiguousSpace::mangle_unused_area_complete() {
  mangler()->mangle_unused_area_complete();
}
#endif // NOT_PRODUCT

void CompactibleSpace::initialize(MemRegion mr,
                                  bool clear_space,
                                  bool mangle_space) {
  Space::initialize(mr, clear_space, mangle_space);
  set_compaction_top(bottom());
  _next_compaction_space = NULL;
}

void CompactibleSpace::clear(bool mangle_space) {
  Space::clear(mangle_space);
  _compaction_top = bottom();
}

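// Compute the new (post-compaction) address of live object q of the given
// size.  When the current compaction space fills up, compaction continues in
// the next compaction space, falling back to the young generation's first
// compaction space when the chain is exhausted.  The forwarding pointer is
// recorded directly in the object's mark word via forward_to(), and the block
// offset table threshold is advanced as the destination area fills.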
HeapWord* CompactibleSpace::forward(oop q, size_t size,
                                    CompactPoint* cp, HeapWord* compact_top) {
  // q is alive
  // First check if we should switch compaction space
  assert(this == cp->space, "'this' should be current compaction space.");
  size_t compaction_max_size = pointer_delta(end(), compact_top);
  while (size > compaction_max_size) {
    // switch to next compaction space
    cp->space->set_compaction_top(compact_top);
    cp->space = cp->space->next_compaction_space();
    if (cp->space == NULL) {
      cp->gen = GenCollectedHeap::heap()->young_gen();
      assert(cp->gen != NULL, "compaction must succeed");
      cp->space = cp->gen->first_compaction_space();
      assert(cp->space != NULL, "generation must have a first compaction space");
    }
    compact_top = cp->space->bottom();
    cp->space->set_compaction_top(compact_top);
    cp->threshold = cp->space->initialize_threshold();
    compaction_max_size = pointer_delta(cp->space->end(), compact_top);
  }

  // store the forwarding pointer into the mark word
  if (cast_from_oop<HeapWord*>(q) != compact_top) {
    q->forward_to(cast_to_oop(compact_top));
    assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
  } else {
    // if the object isn't moving we can just set the mark to the default
    // mark and handle it specially later on.
    q->init_mark();
    assert(q->forwardee() == NULL, "should be forwarded to NULL");
  }

  compact_top += size;

  // we need to update the offset table so that the beginnings of objects can be
  // found during scavenge.  Note that we are updating the offset table based on
  // where the object will be once the compaction phase finishes.
  if (compact_top > cp->threshold)
    cp->threshold =
      cp->space->cross_threshold(compact_top - size, compact_top);
  return compact_top;
}

#if INCLUDE_SERIALGC

void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  scan_and_forward(this, cp);
}

void CompactibleSpace::adjust_pointers() {

// ... (intervening lines elided) ...

}

// Lock-free.
HeapWord* ContiguousSpace::par_allocate(size_t size) {
  return par_allocate_impl(size);
}

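// Fill most of the remaining free space with a throw-away object: an int
// array when there is room for one, otherwise a plain java.lang.Object of the
// minimum fill size.  With factor > 0, roughly 1/factor of the free space is
// left unfilled.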
void ContiguousSpace::allocate_temporary_filler(int factor) {
  // allocate temporary type array decreasing free size with factor 'factor'
  assert(factor >= 0, "just checking");
  size_t size = pointer_delta(end(), top());

  // if space is full, return
  if (size == 0) return;

  if (factor > 0) {
    size -= size/factor;
  }
  size = align_object_size(size);

  const size_t array_header_size = typeArrayOopDesc::header_size(T_INT);
  if (size >= align_object_size(array_header_size)) {
    size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
    // allocate uninitialized int array
    typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
    assert(t != NULL, "allocation should succeed");
    t->set_mark(markWord::prototype());
    t->set_klass(Universe::intArrayKlassObj());
    t->set_length((int)length);
  } else {
    assert(size == CollectedHeap::min_fill_size(),
           "size for smallest fake object doesn't match");
    instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
    obj->set_mark(markWord::prototype());
    obj->set_klass_gap(0);
    obj->set_klass(vmClasses::Object_klass());
  }
}

HeapWord* OffsetTableContigSpace::initialize_threshold() {
  return _offsets.initialize_threshold();
}

HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
  _offsets.alloc_block(start, end);
  return _offsets.threshold();
}

OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
                                               MemRegion mr) :
  _offsets(sharedOffsetArray, mr),
  _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true)
{
  _offsets.set_contig_space(this);
  initialize(mr, SpaceDecorator::Clear, SpaceDecorator::Mangle);
}

// =============================================================================
// Updated version of the same file: object forwarding now goes through a
// SlidingForwarding table instead of being stored directly in the mark word.
// =============================================================================

 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "gc/shared/blockOffsetTable.inline.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/genCollectedHeap.hpp"
#include "gc/shared/genOopClosures.inline.hpp"
#include "gc/shared/slidingForwarding.inline.hpp"
#include "gc/shared/space.hpp"
#include "gc/shared/space.inline.hpp"
#include "gc/shared/spaceDecorator.inline.hpp"
#include "memory/iterator.inline.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/atomic.hpp"
#include "runtime/java.hpp"
#include "runtime/prefetch.inline.hpp"
#include "runtime/safepoint.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_SERIALGC
#include "gc/serial/defNewGeneration.hpp"
#endif

HeapWord* DirtyCardToOopClosure::get_actual_top(HeapWord* top,
                                                HeapWord* top_obj) {

// ... (intervening lines elided) ...

}
void ContiguousSpace::mangle_unused_area_complete() {
  mangler()->mangle_unused_area_complete();
}
#endif // NOT_PRODUCT

void CompactibleSpace::initialize(MemRegion mr,
                                  bool clear_space,
                                  bool mangle_space) {
  Space::initialize(mr, clear_space, mangle_space);
  set_compaction_top(bottom());
  _next_compaction_space = NULL;
}

void CompactibleSpace::clear(bool mangle_space) {
  Space::clear(mangle_space);
  _compaction_top = bottom();
}

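// Compute the new (post-compaction) address of live object q of the given
// size, switching to the next compaction space when the current one fills up.
// The new location is recorded through the SlidingForwarding table supplied by
// the caller rather than being written directly into the mark word; objects
// that do not move keep the default mark and are not registered as forwarded.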
HeapWord* CompactibleSpace::forward(oop q, size_t size,
                                    CompactPoint* cp, HeapWord* compact_top,
                                    SlidingForwarding* const forwarding) {
  // q is alive
  // First check if we should switch compaction space
  assert(this == cp->space, "'this' should be current compaction space.");
  size_t compaction_max_size = pointer_delta(end(), compact_top);
  while (size > compaction_max_size) {
    // switch to next compaction space
    cp->space->set_compaction_top(compact_top);
    cp->space = cp->space->next_compaction_space();
    if (cp->space == NULL) {
      cp->gen = GenCollectedHeap::heap()->young_gen();
      assert(cp->gen != NULL, "compaction must succeed");
      cp->space = cp->gen->first_compaction_space();
      assert(cp->space != NULL, "generation must have a first compaction space");
    }
    compact_top = cp->space->bottom();
    cp->space->set_compaction_top(compact_top);
    cp->threshold = cp->space->initialize_threshold();
    compaction_max_size = pointer_delta(cp->space->end(), compact_top);
  }

  // store the forwarding pointer into the mark word
  if (cast_from_oop<HeapWord*>(q) != compact_top) {
    forwarding->forward_to(q, cast_to_oop(compact_top));
    assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
  } else {
    // if the object isn't moving we can just set the mark to the default
    // mark and handle it specially later on.
    q->init_mark();
    assert(!q->is_forwarded(), "should not be forwarded");
  }

  compact_top += size;

  // we need to update the offset table so that the beginnings of objects can be
  // found during scavenge.  Note that we are updating the offset table based on
  // where the object will be once the compaction phase finishes.
  if (compact_top > cp->threshold)
    cp->threshold =
      cp->space->cross_threshold(compact_top - size, compact_top);
  return compact_top;
}

#if INCLUDE_SERIALGC

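// Serial GC: walk the live objects in this space and assign each one its
// post-compaction address via the scan_and_forward() helper, which invokes
// forward() above for every live object it encounters.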
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  scan_and_forward(this, cp);
}

void CompactibleSpace::adjust_pointers() {

// ... (intervening lines elided) ...

}

// Lock-free.
HeapWord* ContiguousSpace::par_allocate(size_t size) {
  return par_allocate_impl(size);
}

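// Fill most of the remaining free space with a throw-away object: an int
// array when there is room for one, otherwise a plain java.lang.Object of the
// minimum fill size.  With factor > 0, roughly 1/factor of the free space is
// left unfilled.  On 64-bit builds the klass is installed via the klass's
// prototype mark word instead of a separate klass field.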
void ContiguousSpace::allocate_temporary_filler(int factor) {
  // allocate temporary type array decreasing free size with factor 'factor'
  assert(factor >= 0, "just checking");
  size_t size = pointer_delta(end(), top());

  // if space is full, return
  if (size == 0) return;

  if (factor > 0) {
    size -= size/factor;
  }
  size = align_object_size(size);

  const size_t array_header_size = (arrayOopDesc::base_offset_in_bytes(T_INT) + BytesPerWord) / BytesPerWord;
  if (size >= align_object_size(array_header_size)) {
    size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
    // allocate uninitialized int array
    typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
    assert(t != NULL, "allocation should succeed");
#ifdef _LP64
    t->set_mark(Universe::intArrayKlassObj()->prototype_header());
#else
    t->set_mark(markWord::prototype());
    t->set_klass(Universe::intArrayKlassObj());
#endif
    t->set_length((int)length);
  } else {
    assert(size == CollectedHeap::min_fill_size(),
           "size for smallest fake object doesn't match");
    instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
#ifdef _LP64
    obj->set_mark(vmClasses::Object_klass()->prototype_header());
#else
    obj->set_mark(markWord::prototype());
    obj->set_klass(vmClasses::Object_klass());
#endif
  }
}

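// Block offset table maintenance: initialize_threshold() returns the initial
// address beyond which the offset table must be updated, and cross_threshold()
// records the block [start, end) in the table and returns the next threshold.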
HeapWord* OffsetTableContigSpace::initialize_threshold() {
  return _offsets.initialize_threshold();
}

HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
  _offsets.alloc_block(start, end);
  return _offsets.threshold();
}

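// A contiguous space whose block starts are tracked by a block offset table
// carved out of the generation's shared offset array.  The constructor wires
// that table to this space and then clears (and, in non-product builds,
// mangles) the underlying memory.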
OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
                                               MemRegion mr) :
  _offsets(sharedOffsetArray, mr),
  _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true)
{
  _offsets.set_contig_space(this);
  initialize(mr, SpaceDecorator::Clear, SpaceDecorator::Mangle);
}