
src/hotspot/cpu/x86/vm_version_x86.cpp


--- old/src/hotspot/cpu/x86/vm_version_x86.cpp
+++ new/src/hotspot/cpu/x86/vm_version_x86.cpp
@@ -1777,41 +1777,41 @@
     FLAG_SET_DEFAULT(UseFastStosb, false);
   }
 
   // For AMD Processors use XMM/YMM MOVDQU instructions
   // for Object Initialization as default
   if (is_amd() && cpu_family() >= 0x19) {
     if (FLAG_IS_DEFAULT(UseFastStosb)) {
       UseFastStosb = false;
     }
   }
 
 #ifdef COMPILER2
   if (is_intel() && MaxVectorSize > 16) {
     if (FLAG_IS_DEFAULT(UseFastStosb)) {
       UseFastStosb = false;
     }
   }
 #endif
 
   // Use XMM/YMM MOVDQU instruction for Object Initialization
-  if (!UseFastStosb && UseUnalignedLoadStores) {
+  if (UseUnalignedLoadStores) {
     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
       UseXMMForObjInit = true;
     }
   } else if (UseXMMForObjInit) {
     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
   }
 
 #ifdef COMPILER2
   if (FLAG_IS_DEFAULT(AlignVector)) {
     // Modern processors allow misaligned memory operations for vectors.
     AlignVector = !UseUnalignedLoadStores;
   }
 #endif // COMPILER2
 
   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
     } else if (!supports_sse() && supports_3dnow_prefetch()) {
       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
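
The hunk above follows HotSpot's flag-ergonomics pattern: an assignment guarded by FLAG_IS_DEFAULT() overrides a flag only while it still holds its built-in default, so a platform-specific choice never clobbers an explicit -XX: option given on the command line. Below is a minimal standalone sketch of that pattern, not HotSpot code; Flag, flag_is_default, and pick_obj_init_defaults are hypothetical stand-ins for the real globals and macros, and the UseXMMForObjInit condition mirrors the new side of the hunk.

// Standalone model of FLAG_IS_DEFAULT()-guarded ergonomics (sketch only).
#include <cstdio>

struct Flag {
  bool value;
  bool set_on_command_line;  // false => still at its built-in default
};

static bool flag_is_default(const Flag& f) { return !f.set_on_command_line; }

// Mirrors the structure of the hunk: prefer REP STOSB-based initialization
// (UseFastStosb) unless the CPU is one where XMM/YMM MOVDQU initialization
// is preferred, then derive UseXMMForObjInit from unaligned load/store
// support, as on the new side of the hunk.
static void pick_obj_init_defaults(bool is_amd, unsigned cpu_family,
                                   bool unaligned_ld_st,
                                   Flag& use_fast_stosb,
                                   Flag& use_xmm_obj_init) {
  if (is_amd && cpu_family >= 0x19) {        // Zen 3 and later
    if (flag_is_default(use_fast_stosb)) {
      use_fast_stosb.value = false;          // only if the user did not set it
    }
  }
  if (unaligned_ld_st) {
    if (flag_is_default(use_xmm_obj_init)) {
      use_xmm_obj_init.value = true;
    }
  } else if (use_xmm_obj_init.value) {
    // User forced the flag on hardware that cannot honor it: warn and reset.
    std::puts("warning: UseXMMForObjInit requires unaligned load/stores; disabled");
    use_xmm_obj_init.value = false;
  }
}

int main() {
  Flag fast_stosb   = { true,  false };  // built-in defaults, user untouched
  Flag xmm_obj_init = { false, false };
  pick_obj_init_defaults(/*is_amd=*/true, /*cpu_family=*/0x19,
                         /*unaligned_ld_st=*/true, fast_stosb, xmm_obj_init);
  std::printf("UseFastStosb=%d UseXMMForObjInit=%d\n",
              fast_stosb.value, xmm_obj_init.value);
  return 0;
}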