12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/vmClasses.hpp"
27 #include "classfile/vmSymbols.hpp"
28 #include "gc/shared/blockOffsetTable.inline.hpp"
29 #include "gc/shared/collectedHeap.inline.hpp"
30 #include "gc/shared/genCollectedHeap.hpp"
31 #include "gc/shared/genOopClosures.inline.hpp"
32 #include "gc/shared/space.hpp"
33 #include "gc/shared/space.inline.hpp"
34 #include "gc/shared/spaceDecorator.inline.hpp"
35 #include "memory/iterator.inline.hpp"
36 #include "memory/universe.hpp"
37 #include "oops/oop.inline.hpp"
38 #include "runtime/atomic.hpp"
39 #include "runtime/java.hpp"
40 #include "runtime/prefetch.inline.hpp"
41 #include "runtime/safepoint.hpp"
42 #include "utilities/align.hpp"
43 #include "utilities/copy.hpp"
44 #include "utilities/globalDefinitions.hpp"
45 #include "utilities/macros.hpp"
46 #if INCLUDE_SERIALGC
47 #include "gc/serial/defNewGeneration.hpp"
48 #endif
49
50 HeapWord* DirtyCardToOopClosure::get_actual_top(HeapWord* top,
51 HeapWord* top_obj) {
329 mangler()->mangle_unused_area();
330 }
// Mangle the complete unused portion of this space by delegating to the
// space's mangler. Debug-only: this definition sits under the NOT_PRODUCT
// guard that ends just below.
void ContiguousSpace::mangle_unused_area_complete() {
  mangler()->mangle_unused_area_complete();
}
334 #endif // NOT_PRODUCT
335
// Set up this space over the region mr (clearing/mangling per the flags,
// via Space::initialize) and reset the compaction state: the compaction
// target starts at bottom() and no next compaction space is linked yet.
void CompactibleSpace::initialize(MemRegion mr,
                                  bool clear_space,
                                  bool mangle_space) {
  Space::initialize(mr, clear_space, mangle_space);
  set_compaction_top(bottom());
  _next_compaction_space = NULL;
}
343
// Clear the space (mangling policy handled by Space::clear) and reset the
// compaction target back to the bottom of the space.
void CompactibleSpace::clear(bool mangle_space) {
  Space::clear(mangle_space);
  _compaction_top = bottom();
}
348
// Compute the post-compaction destination of live object q (of 'size'
// heap words), starting at compact_top in the current compaction space
// and switching to the next compaction space — falling back to the young
// generation's first compaction space — whenever q does not fit. The
// destination is encoded into q's mark word as a forwarding pointer
// (unless q is not moving, in which case the mark is reset to the default
// and handled specially later). Also keeps the destination space's
// block-offset threshold up to date. Returns the updated compact_top.
HeapWord* CompactibleSpace::forward(oop q, size_t size,
                                    CompactPoint* cp, HeapWord* compact_top) {
  // q is alive
  // First check if we should switch compaction space
  assert(this == cp->space, "'this' should be current compaction space.");
  size_t compaction_max_size = pointer_delta(end(), compact_top);
  while (size > compaction_max_size) {
    // switch to next compaction space
    cp->space->set_compaction_top(compact_top);
    cp->space = cp->space->next_compaction_space();
    if (cp->space == NULL) {
      // Ran out of compaction spaces in this chain; continue compacting
      // into the young generation's first compaction space.
      cp->gen = GenCollectedHeap::heap()->young_gen();
      assert(cp->gen != NULL, "compaction must succeed");
      cp->space = cp->gen->first_compaction_space();
      assert(cp->space != NULL, "generation must have a first compaction space");
    }
    compact_top = cp->space->bottom();
    cp->space->set_compaction_top(compact_top);
    cp->threshold = cp->space->initialize_threshold();
    compaction_max_size = pointer_delta(cp->space->end(), compact_top);
  }

  // store the forwarding pointer into the mark word
  if (cast_from_oop<HeapWord*>(q) != compact_top) {
    q->forward_to(cast_to_oop(compact_top));
    assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
  } else {
    // if the object isn't moving we can just set the mark to the default
    // mark and handle it specially later on.
    q->init_mark();
    assert(q->forwardee() == NULL, "should be forwarded to NULL");
  }

  compact_top += size;

  // we need to update the offset table so that the beginnings of objects can be
  // found during scavenge. Note that we are updating the offset table based on
  // where the object will be once the compaction phase finishes.
  if (compact_top > cp->threshold)
    cp->threshold =
      cp->space->cross_threshold(compact_top - size, compact_top);
  return compact_top;
}
392
393 #if INCLUDE_SERIALGC
394
// Compute forwarding addresses for the live objects in this space,
// accumulating into the given compaction point (see
// CompactibleSpace::forward, invoked by scan_and_forward).
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  scan_and_forward(this, cp);
}
398
399 void CompactibleSpace::adjust_pointers() {
400 // Check first is there is any work to do.
401 if (used() == 0) {
402 return; // Nothing to do.
403 }
404
405 scan_and_adjust_pointers(this);
406 }
407
// Perform the compaction pass over this space (see scan_and_compact).
void CompactibleSpace::compact() {
  scan_and_compact(this);
}
411
412 #endif // INCLUDE_SERIALGC
413
414 void Space::print_short() const { print_short_on(tty); }
415
416 void Space::print_short_on(outputStream* st) const {
417 st->print(" space " SIZE_FORMAT "K, %3d%% used", capacity() / K,
418 (int) ((double) used() * 100 / capacity()));
419 }
420
421 void Space::print() const { print_on(tty); }
422
// Print the short usage summary followed by the half-open address
// range [bottom, end) of this space.
void Space::print_on(outputStream* st) const {
  print_short_on(st);
  st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ")",
                p2i(bottom()), p2i(end()));
}
428
429 void ContiguousSpace::print_on(outputStream* st) const {
569 }
570
// Lock-free.
// Allocate 'size' heap words; safe for concurrent callers (presumably
// via an atomic bump of top in par_allocate_impl — confirm there).
HeapWord* ContiguousSpace::par_allocate(size_t size) {
  return par_allocate_impl(size);
}
575
// Fill most of the remaining free space with a dummy object: an int
// array when the chosen size can hold an int-array header, otherwise a
// minimal java.lang.Object instance. With factor > 0, roughly 1/factor
// of the current free space is left allocatable; with factor == 0 the
// whole free space is consumed.
void ContiguousSpace::allocate_temporary_filler(int factor) {
  // allocate temporary type array decreasing free size with factor 'factor'
  assert(factor >= 0, "just checking");
  size_t size = pointer_delta(end(), top());

  // if space is full, return
  if (size == 0) return;

  if (factor > 0) {
    size -= size/factor;
  }
  size = align_object_size(size);

  const size_t array_header_size = typeArrayOopDesc::header_size(T_INT);
  if (size >= align_object_size(array_header_size)) {
    // Payload words after the header, expressed as a count of jint elements.
    size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
    // allocate uninitialized int array
    typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
    assert(t != NULL, "allocation should succeed");
    t->set_mark(markWord::prototype());
    t->set_klass(Universe::intArrayKlassObj());
    t->set_length((int)length);
  } else {
    // Too small for an array header: must be exactly the minimum fill
    // size, and becomes a plain Object instance.
    assert(size == CollectedHeap::min_fill_size(),
           "size for smallest fake object doesn't match");
    instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
    obj->set_mark(markWord::prototype());
    obj->set_klass_gap(0);
    obj->set_klass(vmClasses::Object_klass());
  }
}
607
// (Re)initialize the block-offset table's update threshold for this
// space and return it.
HeapWord* OffsetTableContigSpace::initialize_threshold() {
  return _offsets.initialize_threshold();
}
611
// Record the block [start, end) in the offset table and return the
// table's new update threshold.
HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
  _offsets.alloc_block(start, end);
  return _offsets.threshold();
}
616
// Construct a contiguous space over mr whose block-start queries are
// backed by the given shared block-offset array. The space is cleared
// and mangling is requested on initialization.
OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
                                               MemRegion mr) :
  _offsets(sharedOffsetArray, mr),
  // Leaf-ranked mutex; presumably serializes the slow path of parallel
  // allocation — confirm against its users.
  _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true)
{
  _offsets.set_contig_space(this);
  initialize(mr, SpaceDecorator::Clear, SpaceDecorator::Mangle);
}
|
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/vmClasses.hpp"
27 #include "classfile/vmSymbols.hpp"
28 #include "gc/shared/blockOffsetTable.inline.hpp"
29 #include "gc/shared/collectedHeap.inline.hpp"
30 #include "gc/shared/genCollectedHeap.hpp"
31 #include "gc/shared/genOopClosures.inline.hpp"
32 #include "gc/shared/slidingForwarding.inline.hpp"
33 #include "gc/shared/space.hpp"
34 #include "gc/shared/space.inline.hpp"
35 #include "gc/shared/spaceDecorator.inline.hpp"
36 #include "memory/iterator.inline.hpp"
37 #include "memory/universe.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "runtime/atomic.hpp"
40 #include "runtime/java.hpp"
41 #include "runtime/prefetch.inline.hpp"
42 #include "runtime/safepoint.hpp"
43 #include "utilities/align.hpp"
44 #include "utilities/copy.hpp"
45 #include "utilities/globalDefinitions.hpp"
46 #include "utilities/macros.hpp"
47 #if INCLUDE_SERIALGC
48 #include "gc/serial/defNewGeneration.hpp"
49 #endif
50
51 HeapWord* DirtyCardToOopClosure::get_actual_top(HeapWord* top,
52 HeapWord* top_obj) {
330 mangler()->mangle_unused_area();
331 }
// Mangle the complete unused portion of this space by delegating to the
// space's mangler. Debug-only: this definition sits under the NOT_PRODUCT
// guard that ends just below.
void ContiguousSpace::mangle_unused_area_complete() {
  mangler()->mangle_unused_area_complete();
}
335 #endif // NOT_PRODUCT
336
// Set up this space over the region mr (clearing/mangling per the flags,
// via Space::initialize) and reset the compaction state: the compaction
// target starts at bottom() and no next compaction space is linked yet.
void CompactibleSpace::initialize(MemRegion mr,
                                  bool clear_space,
                                  bool mangle_space) {
  Space::initialize(mr, clear_space, mangle_space);
  set_compaction_top(bottom());
  _next_compaction_space = NULL;
}
344
// Clear the space (mangling policy handled by Space::clear) and reset the
// compaction target back to the bottom of the space.
void CompactibleSpace::clear(bool mangle_space) {
  Space::clear(mangle_space);
  _compaction_top = bottom();
}
349
// Compute the post-compaction destination of live object q (of 'size'
// heap words), starting at compact_top in the current compaction space
// and switching to the next compaction space — falling back to the young
// generation's first compaction space — whenever q does not fit. The
// destination is recorded via SlidingForwarding (unless q is not moving,
// in which case the mark is reset to the default and handled specially
// later). The ALT_FWD template flag selects the SlidingForwarding
// encoding variant; it must match the UseAltGCForwarding runtime switch
// at the call site. Returns the updated compact_top.
template <bool ALT_FWD>
HeapWord* CompactibleSpace::forward(oop q, size_t size,
                                    CompactPoint* cp, HeapWord* compact_top) {
  // q is alive
  // First check if we should switch compaction space
  assert(this == cp->space, "'this' should be current compaction space.");
  size_t compaction_max_size = pointer_delta(end(), compact_top);
  while (size > compaction_max_size) {
    // switch to next compaction space
    cp->space->set_compaction_top(compact_top);
    cp->space = cp->space->next_compaction_space();
    if (cp->space == NULL) {
      // Ran out of compaction spaces in this chain; continue compacting
      // into the young generation's first compaction space.
      cp->gen = GenCollectedHeap::heap()->young_gen();
      assert(cp->gen != NULL, "compaction must succeed");
      cp->space = cp->gen->first_compaction_space();
      assert(cp->space != NULL, "generation must have a first compaction space");
    }
    compact_top = cp->space->bottom();
    cp->space->set_compaction_top(compact_top);
    cp->threshold = cp->space->initialize_threshold();
    compaction_max_size = pointer_delta(cp->space->end(), compact_top);
  }

  // store the forwarding pointer into the mark word
  if (cast_from_oop<HeapWord*>(q) != compact_top) {
    SlidingForwarding::forward_to<ALT_FWD>(q, cast_to_oop(compact_top));
    assert(q->is_gc_marked(), "encoding the pointer should preserve the mark");
  } else {
    // if the object isn't moving we can just set the mark to the default
    // mark and handle it specially later on.
    q->init_mark();
    assert(SlidingForwarding::is_not_forwarded(q), "should not be forwarded");
  }

  compact_top += size;

  // we need to update the offset table so that the beginnings of objects can be
  // found during scavenge. Note that we are updating the offset table based on
  // where the object will be once the compaction phase finishes.
  if (compact_top > cp->threshold)
    cp->threshold =
      cp->space->cross_threshold(compact_top - size, compact_top);
  return compact_top;
}
394
395 #if INCLUDE_SERIALGC
396
// Compute forwarding addresses for the live objects in this space,
// accumulating into the given compaction point. The boolean template
// argument to scan_and_forward selects the SlidingForwarding encoding
// matching the UseAltGCForwarding runtime switch.
void ContiguousSpace::prepare_for_compaction(CompactPoint* cp) {
  if (UseAltGCForwarding) {
    scan_and_forward<true>(this, cp);
  } else {
    scan_and_forward<false>(this, cp);
  }
}
404
405 void CompactibleSpace::adjust_pointers() {
406 // Check first is there is any work to do.
407 if (used() == 0) {
408 return; // Nothing to do.
409 }
410
411 if (UseAltGCForwarding) {
412 scan_and_adjust_pointers<true>(this);
413 } else {
414 scan_and_adjust_pointers<false>(this);
415 }
416 }
417
// Perform the compaction pass over this space (see scan_and_compact),
// dispatching on the forwarding encoding selected by UseAltGCForwarding.
void CompactibleSpace::compact() {
  if (UseAltGCForwarding) {
    scan_and_compact<true>(this);
  } else {
    scan_and_compact<false>(this);
  }
}
425
426 #endif // INCLUDE_SERIALGC
427
428 void Space::print_short() const { print_short_on(tty); }
429
430 void Space::print_short_on(outputStream* st) const {
431 st->print(" space " SIZE_FORMAT "K, %3d%% used", capacity() / K,
432 (int) ((double) used() * 100 / capacity()));
433 }
434
435 void Space::print() const { print_on(tty); }
436
// Print the short usage summary followed by the half-open address
// range [bottom, end) of this space.
void Space::print_on(outputStream* st) const {
  print_short_on(st);
  st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ")",
                p2i(bottom()), p2i(end()));
}
442
443 void ContiguousSpace::print_on(outputStream* st) const {
583 }
584
// Lock-free.
// Allocate 'size' heap words; safe for concurrent callers (presumably
// via an atomic bump of top in par_allocate_impl — confirm there).
HeapWord* ContiguousSpace::par_allocate(size_t size) {
  return par_allocate_impl(size);
}
589
// Fill most of the remaining free space with a dummy object: an int
// array when the chosen size can hold an int-array header, otherwise a
// minimal java.lang.Object instance. With factor > 0, roughly 1/factor
// of the current free space is left allocatable; with factor == 0 the
// whole free space is consumed.
void ContiguousSpace::allocate_temporary_filler(int factor) {
  // allocate temporary type array decreasing free size with factor 'factor'
  assert(factor >= 0, "just checking");
  size_t size = pointer_delta(end(), top());

  // if space is full, return
  if (size == 0) return;

  if (factor > 0) {
    size -= size/factor;
  }
  size = align_object_size(size);

  // Int-array header size in words, derived from the element base offset
  // (the +BytesPerWord rounds the byte offset up to whole words —
  // presumably to over-reserve when the base is not word-aligned; confirm).
  const size_t array_header_size = (arrayOopDesc::base_offset_in_bytes(T_INT) + BytesPerWord) / BytesPerWord;
  if (size >= align_object_size(array_header_size)) {
    // Payload words after the header, expressed as a count of jint elements.
    size_t length = (size - array_header_size) * (HeapWordSize / sizeof(jint));
    // allocate uninitialized int array
    typeArrayOop t = (typeArrayOop) cast_to_oop(allocate(size));
    assert(t != NULL, "allocation should succeed");
    if (UseCompactObjectHeaders) {
      // Compact headers: the klass's prototype header initializes the
      // mark word; no separate klass field is written.
      t->set_mark(Universe::intArrayKlassObj()->prototype_header());
    } else {
      t->set_mark(markWord::prototype());
      t->set_klass(Universe::intArrayKlassObj());
    }
    t->set_length((int)length);
  } else {
    // Too small for an array header: must be exactly the minimum fill
    // size, and becomes a plain Object instance.
    assert(size == CollectedHeap::min_fill_size(),
           "size for smallest fake object doesn't match");
    instanceOop obj = (instanceOop) cast_to_oop(allocate(size));
    if (UseCompactObjectHeaders) {
      // Compact headers: mark word comes from the klass's prototype
      // header; no klass gap or klass field is written.
      obj->set_mark(vmClasses::Object_klass()->prototype_header());
    } else {
      obj->set_mark(markWord::prototype());
      obj->set_klass_gap(0);
      obj->set_klass(vmClasses::Object_klass());
    }
  }
}
629
// (Re)initialize the block-offset table's update threshold for this
// space and return it.
HeapWord* OffsetTableContigSpace::initialize_threshold() {
  return _offsets.initialize_threshold();
}
633
// Record the block [start, end) in the offset table and return the
// table's new update threshold.
HeapWord* OffsetTableContigSpace::cross_threshold(HeapWord* start, HeapWord* end) {
  _offsets.alloc_block(start, end);
  return _offsets.threshold();
}
638
// Construct a contiguous space over mr whose block-start queries are
// backed by the given shared block-offset array. The space is cleared
// and mangling is requested on initialization.
OffsetTableContigSpace::OffsetTableContigSpace(BlockOffsetSharedArray* sharedOffsetArray,
                                               MemRegion mr) :
  _offsets(sharedOffsetArray, mr),
  // Leaf-ranked mutex; presumably serializes the slow path of parallel
  // allocation — confirm against its users.
  _par_alloc_lock(Mutex::leaf, "OffsetTableContigSpace par alloc lock", true)
{
  _offsets.set_contig_space(this);
  initialize(mr, SpaceDecorator::Clear, SpaceDecorator::Mangle);
}
|