
src/hotspot/cpu/x86/vm_version_x86.cpp

old/src/hotspot/cpu/x86/vm_version_x86.cpp

1770     FLAG_SET_DEFAULT(UseFastStosb, false);
1771   }
1772 
1773   // For AMD Processors use XMM/YMM MOVDQU instructions
1774   // for Object Initialization as default
1775   if (is_amd() && cpu_family() >= 0x19) {
1776     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1777       UseFastStosb = false;
1778     }
1779   }
1780 
1781 #ifdef COMPILER2
1782   if (is_intel() && MaxVectorSize > 16) {
1783     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1784       UseFastStosb = false;
1785     }
1786   }
1787 #endif
1788 
1789   // Use XMM/YMM MOVDQU instruction for Object Initialization
1790   if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
1791     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1792       UseXMMForObjInit = true;
1793     }
1794   } else if (UseXMMForObjInit) {
1795     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1796     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1797   }
1798 
1799 #ifdef COMPILER2
1800   if (FLAG_IS_DEFAULT(AlignVector)) {
1801     // Modern processors allow misaligned memory operations for vectors.
1802     AlignVector = !UseUnalignedLoadStores;
1803   }
1804 #endif // COMPILER2
1805 
1806   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1807     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1808       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1809     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1810       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);

new/src/hotspot/cpu/x86/vm_version_x86.cpp

1770     FLAG_SET_DEFAULT(UseFastStosb, false);
1771   }
1772 
1773   // For AMD Processors use XMM/YMM MOVDQU instructions
1774   // for Object Initialization as default
1775   if (is_amd() && cpu_family() >= 0x19) {
1776     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1777       UseFastStosb = false;
1778     }
1779   }
1780 
1781 #ifdef COMPILER2
1782   if (is_intel() && MaxVectorSize > 16) {
1783     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1784       UseFastStosb = false;
1785     }
1786   }
1787 #endif
1788 
1789   // Use XMM/YMM MOVDQU instruction for Object Initialization
1790   if (UseSSE >= 2 && UseUnalignedLoadStores) {
1791     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1792       UseXMMForObjInit = true;
1793     }
1794   } else if (UseXMMForObjInit) {
1795     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1796     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1797   }
1798 
1799 #ifdef COMPILER2
1800   if (FLAG_IS_DEFAULT(AlignVector)) {
1801     // Modern processors allow misaligned memory operations for vectors.
1802     AlignVector = !UseUnalignedLoadStores;
1803   }
1804 #endif // COMPILER2
1805 
1806   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1807     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1808       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1809     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1810       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
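
The two frames above differ only at line 1790; assuming the usual webrev frame order (old version first, new version second), the change drops the !UseFastStosb guard from the UseXMMForObjInit default condition:

-  if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
+  if (UseSSE >= 2 && UseUnalignedLoadStores) {

For readers tracing how these defaults combine, here is a minimal standalone sketch. CpuInfo, Flags, and pick_defaults are hypothetical stand-ins for the VM's CPU feature queries and the FLAG_IS_DEFAULT/FLAG_SET_DEFAULT machinery, not HotSpot code; it encodes the new form of the condition at line 1790.

#include <cstdio>

// Hypothetical stand-in for the VM's CPU feature queries.
struct CpuInfo {
  bool is_amd;
  bool is_intel;
  int  cpu_family;        // e.g. 0x19 for AMD Zen 3
  int  max_vector_size;   // stands in for MaxVectorSize
  bool sse2;              // stands in for UseSSE >= 2
  bool unaligned_ld_st;   // stands in for UseUnalignedLoadStores
};

// Hypothetical stand-in for the two VM flags; the real code goes through
// FLAG_IS_DEFAULT/FLAG_SET_DEFAULT rather than plain bools.
struct Flags {
  bool use_fast_stosb       = true;   // assume an ERMS-style default
  bool use_xmm_for_obj_init = false;
};

Flags pick_defaults(const CpuInfo& cpu) {
  Flags f;
  // AMD family 0x19 (Zen 3) and newer: prefer XMM/YMM MOVDQU for
  // object initialization, so turn off the REP STOSB default.
  if (cpu.is_amd && cpu.cpu_family >= 0x19) {
    f.use_fast_stosb = false;
  }
  // Intel with vectors wider than 16 bytes (the COMPILER2 block above).
  if (cpu.is_intel && cpu.max_vector_size > 16) {
    f.use_fast_stosb = false;
  }
  // Line 1790, new form: SSE2 plus unaligned load/stores is enough.
  if (cpu.sse2 && cpu.unaligned_ld_st) {
    f.use_xmm_for_obj_init = true;
  }
  return f;
}

int main() {
  CpuInfo zen3 = { true, false, 0x19, 32, true, true };
  Flags f = pick_defaults(zen3);
  printf("UseFastStosb=%d UseXMMForObjInit=%d\n",
         f.use_fast_stosb, f.use_xmm_for_obj_init);
  return 0;
}

On the sketched Zen 3 input this prints UseFastStosb=0 UseXMMForObjInit=1, i.e. the REP STOSB path is disabled and XMM/YMM MOVDQU object initialization becomes the default.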