
src/hotspot/cpu/x86/vm_version_x86.cpp


1787     FLAG_SET_DEFAULT(UseFastStosb, false);
1788   }
1789 
1790   // For AMD Processors use XMM/YMM MOVDQU instructions
1791   // for Object Initialization as default
1792   if (is_amd() && cpu_family() >= 0x19) {
1793     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1794       UseFastStosb = false;
1795     }
1796   }
1797 
1798 #ifdef COMPILER2
1799   if (is_intel() && MaxVectorSize > 16) {
1800     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1801       UseFastStosb = false;
1802     }
1803   }
1804 #endif
1805 
1806   // Use XMM/YMM MOVDQU instruction for Object Initialization
1807   if (!UseFastStosb && UseUnalignedLoadStores) {
1808     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1809       UseXMMForObjInit = true;
1810     }
1811   } else if (UseXMMForObjInit) {
1812     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1813     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1814   }
1815 
1816 #ifdef COMPILER2
1817   if (FLAG_IS_DEFAULT(AlignVector)) {
1818     // Modern processors allow misaligned memory operations for vectors.
1819     AlignVector = !UseUnalignedLoadStores;
1820   }
1821 #endif // COMPILER2
1822 
1823   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1824     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1825       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1826     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1827       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
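
The listing below is the second pane of the webrev frames view: the same hunk from the other version of the file. The two panes are identical except for line 1807, where the first pane guards the UseXMMForObjInit default with !UseFastStosb && UseUnalignedLoadStores and the second uses UseUnalignedLoadStores alone. Webrev conventionally renders the old file first, which would make this change the removal of the !UseFastStosb guard.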

1787     FLAG_SET_DEFAULT(UseFastStosb, false);
1788   }
1789 
1790   // For AMD Processors use XMM/YMM MOVDQU instructions
1791   // for Object Initialization as default
1792   if (is_amd() && cpu_family() >= 0x19) {
1793     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1794       UseFastStosb = false;
1795     }
1796   }
1797 
1798 #ifdef COMPILER2
1799   if (is_intel() && MaxVectorSize > 16) {
1800     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1801       UseFastStosb = false;
1802     }
1803   }
1804 #endif
1805 
1806   // Use XMM/YMM MOVDQU instruction for Object Initialization
1807   if (UseUnalignedLoadStores) {
1808     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1809       UseXMMForObjInit = true;
1810     }
1811   } else if (UseXMMForObjInit) {
1812     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1813     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1814   }
1815 
1816 #ifdef COMPILER2
1817   if (FLAG_IS_DEFAULT(AlignVector)) {
1818     // Modern processors allow misaligned memory operations for vectors.
1819     AlignVector = !UseUnalignedLoadStores;
1820   }
1821 #endif // COMPILER2
1822 
1823   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1824     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1825       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1826     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1827       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
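
To make the effect of that one-line change concrete, here is a minimal standalone sketch of the flag-defaulting logic, assuming the second pane above is the new file. This is plain C++, not HotSpot's flag machinery: FLAG_IS_DEFAULT is approximated by a user_set bit, the CPU checks are passed in as plain booleans, and every name here (Flag, resolve, the parameters) is illustrative rather than taken from the source.

    // Standalone model of the defaulting logic in the hunk above -- not
    // HotSpot code. FLAG_IS_DEFAULT(F) is approximated by !F.user_set.
    #include <cstdio>

    struct Flag {
      bool value;
      bool user_set;  // set explicitly, e.g. via -XX:+UseFastStosb
    };

    static void resolve(bool amd_family_19_plus, bool intel_wide_vectors,
                        bool unaligned_load_stores,
                        Flag &fast_stosb, Flag &xmm_obj_init) {
      // Lines 1790-1804: both CPU checks only lower the *default* of
      // UseFastStosb; an explicit command-line setting wins.
      if ((amd_family_19_plus || intel_wide_vectors) && !fast_stosb.user_set) {
        fast_stosb.value = false;
      }
      // Line 1807, new side: the !UseFastStosb guard is gone, so the
      // UseXMMForObjInit default no longer depends on UseFastStosb.
      if (unaligned_load_stores) {
        if (!xmm_obj_init.user_set) {
          xmm_obj_init.value = true;
        }
      } else if (xmm_obj_init.value) {
        fprintf(stderr, "UseXMMForObjInit requires unaligned load/stores; "
                        "switched off\n");
        xmm_obj_init.value = false;
      }
    }

    int main() {
      // A user passes -XX:+UseFastStosb on a CPU with unaligned load/store
      // support. Under the old condition UseXMMForObjInit stayed off (and an
      // explicit -XX:+UseXMMForObjInit even drew the misleading SSE2
      // warning); under the new condition it simply defaults to true.
      Flag fast_stosb   = { true,  true  };
      Flag xmm_obj_init = { false, false };
      resolve(/*amd*/ false, /*intel*/ false, /*unaligned*/ true,
              fast_stosb, xmm_obj_init);
      printf("UseFastStosb=%d UseXMMForObjInit=%d\n",
             (int)fast_stosb.value, (int)xmm_obj_init.value);
      return 0;
    }

Compiled as-is this prints UseFastStosb=1 UseXMMForObjInit=1; reinstating the old !UseFastStosb guard in resolve() flips the second value to 0 for the same inputs, which is the behavioral difference the diff introduces.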