Webrev review diff for: src/hotspot/cpu/x86/vm_version_x86.cpp
(The hunk below appears twice: the ORIGINAL version first, then the PROPOSED
version. The substantive change is at original line 1795: the condition
"!UseFastStosb && UseUnalignedLoadStores" is relaxed to "UseUnalignedLoadStores",
so UseXMMForObjInit may default to true even when UseFastStosb is enabled.)

1775     FLAG_SET_DEFAULT(UseFastStosb, false);
1776   }
1777 
1778   // For AMD Processors use XMM/YMM MOVDQU instructions
1779   // for Object Initialization as default
1780   if (is_amd() && cpu_family() >= 0x19) {
1781     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1782       UseFastStosb = false;
1783     }
1784   }
1785 
1786 #ifdef COMPILER2
1787   if (is_intel() && MaxVectorSize > 16) {
1788     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1789       UseFastStosb = false;
1790     }
1791   }
1792 #endif
1793 
1794   // Use XMM/YMM MOVDQU instruction for Object Initialization
1795   if (!UseFastStosb && UseUnalignedLoadStores) {
1796     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1797       UseXMMForObjInit = true;
1798     }
1799   } else if (UseXMMForObjInit) {
1800     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1801     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1802   }
1803 
1804 #ifdef COMPILER2
1805   if (FLAG_IS_DEFAULT(AlignVector)) {
1806     // Modern processors allow misaligned memory operations for vectors.
1807     AlignVector = !UseUnalignedLoadStores;
1808   }
1809 #endif // COMPILER2
1810 
1811   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1812     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1813       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1814     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1815       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
---- PROPOSED (new) version of the same hunk follows below ----
1775     FLAG_SET_DEFAULT(UseFastStosb, false);
1776   }
1777 
1778   // For AMD Processors use XMM/YMM MOVDQU instructions
1779   // for Object Initialization as default
1780   if (is_amd() && cpu_family() >= 0x19) {
1781     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1782       UseFastStosb = false;
1783     }
1784   }
1785 
1786 #ifdef COMPILER2
1787   if (is_intel() && MaxVectorSize > 16) {
1788     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1789       UseFastStosb = false;
1790     }
1791   }
1792 #endif
1793 
1794   // Use XMM/YMM MOVDQU instruction for Object Initialization
1795   if (UseUnalignedLoadStores) {
1796     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1797       UseXMMForObjInit = true;
1798     }
1799   } else if (UseXMMForObjInit) {
1800     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1801     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1802   }
1803 
1804 #ifdef COMPILER2
1805   if (FLAG_IS_DEFAULT(AlignVector)) {
1806     // Modern processors allow misaligned memory operations for vectors.
1807     AlignVector = !UseUnalignedLoadStores;
1808   }
1809 #endif // COMPILER2
1810 
1811   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1812     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1813       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1814     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1815       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
---- end of webrev diff ----