src/hotspot/share/runtime/arguments.cpp
// NOTE: set_use_compressed_klass_ptrs() must be called after calling
// set_use_compressed_oops().
void Arguments::set_use_compressed_klass_ptrs() {
#ifdef _LP64
+
+ if (UseCompactObjectHeaders) {
+ // Compact object headers (Lilliput): 512-byte alignment, 22-bit narrow Klass pointers
+ LogKlassAlignmentInBytes = 9;
+ MaxNarrowKlassPointerBits = 22;
+ } else {
+ // Traditional: 8-byte alignment, 32-bit narrow Klass pointers
+ LogKlassAlignmentInBytes = 3;
+ MaxNarrowKlassPointerBits = 32;
+ }
+
+ KlassAlignmentInBytes = 1 << LogKlassAlignmentInBytes;
+ assert(is_aligned(KlassAlignmentInBytes, BytesPerWord), "Must be at least word-sized");
+ KlassAlignmentInWords = KlassAlignmentInBytes / BytesPerWord;
+ NarrowKlassPointerBitMask = ((((uint64_t)1) << MaxNarrowKlassPointerBits) - 1);
+ KlassEncodingMetaspaceMax = UCONST64(1) << (MaxNarrowKlassPointerBits + LogKlassAlignmentInBytes);
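+ // E.g. compact headers: 2^(22+9) = 2 GB of encodable Klass range; traditional: 2^(32+3) = 32 GB.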
+
// On some architectures, the use of UseCompressedClassPointers implies the use of
// UseCompressedOops. The reason is that the rheap_base register of said platforms
// is reused to perform some optimized spilling, in order to use rheap_base as a
// temp register. But by treating it as any other temp register, spilling can typically
// be completely avoided instead. So it is better not to perform this trick. And by
if (CompressedClassSpaceSize > KlassEncodingMetaspaceMax) {
warning("CompressedClassSpaceSize is too large for UseCompressedClassPointers");
FLAG_SET_DEFAULT(UseCompressedClassPointers, false);
}
}
- #endif // _LP64
+
+ // Assert validity of the compressed class space size. The user argument should have been checked
+ // at this point (see CompressedClassSpaceSizeConstraintFunc()), so there is no need to be nice
+ // about it; this fires in case the default is wrong.
+ // TODO: This is placed wrong. CompressedClassSpaceSizeConstraintFunc() runs after ergonomics,
+ // but this assert runs during ergonomics.
+ // assert(CompressedClassSpaceSize <= Metaspace::max_class_space_size(),
+ // "CompressedClassSpaceSize " SIZE_FORMAT " too large (max: " SIZE_FORMAT ")",
+ // CompressedClassSpaceSize, Metaspace::max_class_space_size());
+ #endif // _LP64
}
void Arguments::set_conservative_max_heap_alignment() {
// The conservative maximum required alignment for the heap is the maximum of
// the alignments imposed by several sources: any requirements from the heap
#ifndef CAN_SHOW_REGISTERS_ON_ASSERT
UNSUPPORTED_OPTION(ShowRegistersOnAssert);
#endif // CAN_SHOW_REGISTERS_ON_ASSERT
+ #ifdef _LP64
+ if (UseCompactObjectHeaders && FLAG_IS_CMDLINE(UseCompressedClassPointers) && !UseCompressedClassPointers) {
+ // The user explicitly disabled compressed class pointers (-XX:-UseCompressedClassPointers);
+ // compact object headers require them, so disable compact headers with a warning.
+ warning("Compact object headers require compressed class pointers. Disabling compact object headers.");
+ FLAG_SET_DEFAULT(UseCompactObjectHeaders, false);
+ }
+
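+ // Compact object headers keep the compressed Klass pointer in the mark word; legacy stack
+ // locking displaces the mark word onto the stack, so switch to lightweight locking instead.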
+ if (UseCompactObjectHeaders && LockingMode == LM_LEGACY) {
+ FLAG_SET_DEFAULT(LockingMode, LM_LIGHTWEIGHT);
+ }
+
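+ // Note: this prototype does not support CDS (shared spaces) without compact object headers,
+ // so force UseSharedSpaces off in that case.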
+ if (!UseCompactObjectHeaders) {
+ FLAG_SET_DEFAULT(UseSharedSpaces, false);
+ }
+ #endif
+
return JNI_OK;
}
// Helper class for controlling the lifetime of JavaVMInitArgs
// objects. The contents of the JavaVMInitArgs are guaranteed to be