src/hotspot/cpu/x86/vm_version_x86.cpp

@@ -1836,41 +1836,41 @@
     FLAG_SET_DEFAULT(UseFastStosb, false);
   }

   // For AMD Processors use XMM/YMM MOVDQU instructions
   // for Object Initialization as default
   if (is_amd() && cpu_family() >= 0x19) {
     if (FLAG_IS_DEFAULT(UseFastStosb)) {
       UseFastStosb = false;
     }
   }

 #ifdef COMPILER2
   if (is_intel() && MaxVectorSize > 16) {
     if (FLAG_IS_DEFAULT(UseFastStosb)) {
       UseFastStosb = false;
     }
   }
 #endif

   // Use XMM/YMM MOVDQU instruction for Object Initialization
-  if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
+  if (UseSSE >= 2 && UseUnalignedLoadStores) {
     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
       UseXMMForObjInit = true;
     }
   } else if (UseXMMForObjInit) {
     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
   }

 #ifdef COMPILER2
   if (FLAG_IS_DEFAULT(AlignVector)) {
     // Modern processors allow misaligned memory operations for vectors.
     AlignVector = !UseUnalignedLoadStores;
   }
 #endif // COMPILER2

   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
     } else if (!supports_sse() && supports_3dnow_prefetch()) {
       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
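
For context, UseXMMForObjInit steers the generated object-initialization code toward 16-byte MOVDQU stores, as the comment in the hunk says, which is why its default is gated on UseSSE >= 2 (the XMM baseline) and UseUnalignedLoadStores (MOVDQU is an unaligned access). A rough standalone sketch of that store pattern using SSE2 intrinsics; this is illustrative only, not HotSpot's macro-assembler code, and the function name and loop shape are invented for this sketch:

#include <emmintrin.h>  // SSE2: _mm_setzero_si128, _mm_storeu_si128 (MOVDQU)
#include <cstddef>
#include <cstdint>

// Illustrative only: zero a block with unaligned 16-byte XMM stores,
// the kind of pattern UseXMMForObjInit selects for object initialization.
static void zero_with_movdqu(void* base, size_t bytes) {
  uint8_t* p = static_cast<uint8_t*>(base);
  const __m128i zero = _mm_setzero_si128();
  size_t i = 0;
  for (; i + 16 <= bytes; i += 16) {
    _mm_storeu_si128(reinterpret_cast<__m128i*>(p + i), zero);  // MOVDQU store
  }
  for (; i < bytes; ++i) {
    p[i] = 0;  // tail shorter than one XMM register
  }
}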
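
The competing path, UseFastStosb, zeroes memory with REP STOSB string stores instead. A similarly hedged sketch, assuming x86-64 and GCC/Clang inline-assembly syntax (again invented for illustration, not taken from the JDK):

#include <cstddef>

// Illustrative only: REP STOSB stores AL into RCX bytes starting at RDI,
// the string-store pattern UseFastStosb selects.
static void zero_with_rep_stosb(void* base, size_t bytes) {
  asm volatile("rep stosb"
               : "+D"(base), "+c"(bytes)  // rep stosb advances RDI, counts RCX down
               : "a"(0)                   // fill byte in AL
               : "memory");
}

The one-line change in the hunk drops the !UseFastStosb guard, so the UseXMMForObjInit default now depends only on UseSSE >= 2 and UseUnalignedLoadStores. The flag values a given VM actually ends up with can be checked with java -XX:+PrintFlagsFinal -version.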