src/hotspot/cpu/x86/vm_version_x86.cpp

--- old/src/hotspot/cpu/x86/vm_version_x86.cpp
+++ new/src/hotspot/cpu/x86/vm_version_x86.cpp
@@ -1837,41 +1837,41 @@
     FLAG_SET_DEFAULT(UseFastStosb, false);
   }
 
   // For AMD Processors use XMM/YMM MOVDQU instructions
   // for Object Initialization as default
   if (is_amd() && cpu_family() >= 0x19) {
     if (FLAG_IS_DEFAULT(UseFastStosb)) {
       UseFastStosb = false;
     }
   }
 
 #ifdef COMPILER2
   if (is_intel() && MaxVectorSize > 16) {
     if (FLAG_IS_DEFAULT(UseFastStosb)) {
       UseFastStosb = false;
     }
   }
 #endif
 
   // Use XMM/YMM MOVDQU instruction for Object Initialization
-  if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
+  if (UseSSE >= 2 && UseUnalignedLoadStores) {
     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
       UseXMMForObjInit = true;
     }
   } else if (UseXMMForObjInit) {
     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
   }
 
 #ifdef COMPILER2
   if (FLAG_IS_DEFAULT(AlignVector)) {
     // Modern processors allow misaligned memory operations for vectors.
     AlignVector = !UseUnalignedLoadStores;
   }
 #endif // COMPILER2
 
   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
     } else if (!supports_sse() && supports_3dnow_prefetch()) {
       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
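
The only functional change in the hunk is the dropped !UseFastStosb term at line 1857: the UseXMMForObjInit default no longer depends on whether fast STOSB stayed enabled, only on SSE2 plus unaligned load/store support. A side effect is that an explicit -XX:+UseXMMForObjInit no longer falls into the warning/disable branch merely because UseFastStosb is still on. The sketch below (standalone C++, not HotSpot code; the struct and function names are invented for illustration) isolates the two rules and the one configuration where they disagree:

#include <cstdio>

struct CpuFlags {
  bool use_fast_stosb;        // stands in for UseFastStosb after the earlier defaulting blocks
  int  use_sse;               // stands in for UseSSE; 2 means SSE2 is available
  bool unaligned_load_stores; // stands in for UseUnalignedLoadStores
};

// Old rule (line 1857 before the patch): only default UseXMMForObjInit
// to true when fast STOSB was rejected first.
bool obj_init_default_old(const CpuFlags& f) {
  return !f.use_fast_stosb && f.use_sse >= 2 && f.unaligned_load_stores;
}

// New rule: the SSE2 + unaligned-access check alone picks the default.
bool obj_init_default_new(const CpuFlags& f) {
  return f.use_sse >= 2 && f.unaligned_load_stores;
}

int main() {
  // The one configuration where the rules disagree: a CPU that kept
  // UseFastStosb enabled but also supports SSE2 and unaligned accesses.
  CpuFlags f{true, 2, true};
  std::printf("old default: %d, new default: %d\n",
              obj_init_default_old(f), obj_init_default_new(f));
  return 0;
}

Compiled with any C++11 compiler this prints "old default: 0, new default: 1", i.e. after the patch a CPU that kept UseFastStosb enabled still gets the XMM/YMM MOVDQU object-initialization default.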