src/hotspot/cpu/x86/vm_version_x86.cpp

1741     FLAG_SET_DEFAULT(UseFastStosb, false);
1742   }
1743 
1744   // For AMD processors, use XMM/YMM MOVDQU instructions
1745   // for Object Initialization by default
1746   if (is_amd() && cpu_family() >= 0x19) {
1747     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1748       UseFastStosb = false;
1749     }
1750   }
1751 
1752 #ifdef COMPILER2
1753   if (is_intel() && MaxVectorSize > 16) {
1754     if (FLAG_IS_DEFAULT(UseFastStosb)) {
1755       UseFastStosb = false;
1756     }
1757   }
1758 #endif
1759 
1760   // Use XMM/YMM MOVDQU instruction for Object Initialization
1761   if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
1762     if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1763       UseXMMForObjInit = true;
1764     }
1765   } else if (UseXMMForObjInit) {
1766     warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1767     FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1768   }
1769 
1770 #ifdef COMPILER2
1771   if (FLAG_IS_DEFAULT(AlignVector)) {
1772     // Modern processors allow misaligned memory operations for vectors.
1773     AlignVector = !UseUnalignedLoadStores;
1774   }
1775 #endif // COMPILER2
1776 
1777   if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1778     if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1779       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1780     } else if (!supports_sse() && supports_3dnow_prefetch()) {
1781       FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);

The second pane of this frames view repeats lines 1741-1781 and differs only at line 1761, where the !UseFastStosb test is dropped:

1761   if (UseSSE >= 2 && UseUnalignedLoadStores) {
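The listing leans on HotSpot's FLAG_IS_DEFAULT / FLAG_SET_DEFAULT idiom: a CPU-tuned default is applied only while a flag still has its built-in value, whereas an explicit -XX: setting from the user is left alone (or, as in the UseXMMForObjInit branch, reset with a warning when the hardware cannot honor it). Below is a minimal standalone sketch of that idiom; the Flag struct, flag_is_default, flag_set_default, and the hard-coded CPU properties are illustrative stand-ins, not HotSpot's real macro implementation.

#include <cstdio>

// Illustrative stand-in for a HotSpot flag: its value plus whether the
// user set it explicitly on the command line.
struct Flag {
  bool value;
  bool user_set;
};

// Stand-ins for HotSpot's FLAG_IS_DEFAULT / FLAG_SET_DEFAULT macros.
static bool flag_is_default(const Flag& f)    { return !f.user_set; }
static void flag_set_default(Flag& f, bool v) { f.value = v; }

int main() {
  // Built-in defaults: fast REP STOSB on, XMM/YMM object init off.
  Flag UseFastStosb     = {true,  false};
  Flag UseXMMForObjInit = {false, false};

  // Assumed CPU properties for the example: an AMD family 19h part
  // with SSE2 and fast unaligned load/stores.
  const bool is_amd = true;
  const int  cpu_family = 0x19;
  const bool sse2_ok = true;
  const bool unaligned_ok = true;

  // Mirrors lines 1746-1750: on AMD family 19h and newer, turn
  // REP STOSB off unless the user explicitly asked for it.
  if (is_amd && cpu_family >= 0x19 && flag_is_default(UseFastStosb)) {
    UseFastStosb.value = false;
  }

  // Mirrors the shape of lines 1760-1768: enable XMM/YMM object
  // initialization only when the prerequisites hold and the flag is
  // still at its default; a user-forced flag on unsuitable hardware
  // is reset with a warning instead.
  if (sse2_ok && unaligned_ok) {
    if (flag_is_default(UseXMMForObjInit)) {
      UseXMMForObjInit.value = true;
    }
  } else if (UseXMMForObjInit.value) {
    std::printf("warning: UseXMMForObjInit requires SSE2 and unaligned "
                "load/stores; switching it off\n");
    flag_set_default(UseXMMForObjInit, false);
  }

  std::printf("UseFastStosb=%d UseXMMForObjInit=%d\n",
              UseFastStosb.value, UseXMMForObjInit.value);
  return 0;
}

On a real build, the values these heuristics settle on can be inspected with java -XX:+PrintFlagsFinal -version, filtering the output for UseFastStosb and UseXMMForObjInit.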