1763 FLAG_SET_DEFAULT(UseFastStosb, false);
1764 }
1765
1766 // For AMD Processors use XMM/YMM MOVDQU instructions
1767 // for Object Initialization as default
1768 if (is_amd() && cpu_family() >= 0x19) {
1769 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1770 UseFastStosb = false;
1771 }
1772 }
1773
#ifdef COMPILER2
// With C2 and vectors wider than 16 bytes on Intel, disable REP STOSB
// based initialization by default so the XMM/YMM object-initialization
// path can be selected instead. Only the default is overridden.
if (is_intel() && MaxVectorSize > 16) {
  if (FLAG_IS_DEFAULT(UseFastStosb)) {
    // FLAG_SET_DEFAULT keeps the flag's "default" origin, matching the
    // idiom used for the other flags in this function.
    FLAG_SET_DEFAULT(UseFastStosb, false);
  }
}
#endif
1781
1782 // Use XMM/YMM MOVDQU instruction for Object Initialization
1783 if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
1784 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1785 UseXMMForObjInit = true;
1786 }
1787 } else if (UseXMMForObjInit) {
1788 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1789 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1790 }
1791
1792 #ifdef COMPILER2
// Default AlignVector to true only when unaligned vector loads/stores
// are NOT enabled on this CPU; an explicit -XX:AlignVector setting is
// respected (FLAG_IS_DEFAULT guard).
1793 if (FLAG_IS_DEFAULT(AlignVector)) {
1794 // Modern processors allow misaligned memory operations for vectors.
1795 AlignVector = !UseUnalignedLoadStores;
1796 }
1797 #endif // COMPILER2
1798
1799 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1800 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1801 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1802 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1803 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|
1763 FLAG_SET_DEFAULT(UseFastStosb, false);
1764 }
1765
1766 // For AMD Processors use XMM/YMM MOVDQU instructions
1767 // for Object Initialization as default
1768 if (is_amd() && cpu_family() >= 0x19) {
1769 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1770 UseFastStosb = false;
1771 }
1772 }
1773
#ifdef COMPILER2
// With C2 and vectors wider than 16 bytes on Intel, disable REP STOSB
// based initialization by default so the XMM/YMM object-initialization
// path can be selected instead. Only the default is overridden.
if (is_intel() && MaxVectorSize > 16) {
  if (FLAG_IS_DEFAULT(UseFastStosb)) {
    // FLAG_SET_DEFAULT keeps the flag's "default" origin, matching the
    // idiom used for the other flags in this function.
    FLAG_SET_DEFAULT(UseFastStosb, false);
  }
}
#endif
1781
1782 // Use XMM/YMM MOVDQU instruction for Object Initialization
1783 if (UseSSE >= 2 && UseUnalignedLoadStores) {
1784 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1785 UseXMMForObjInit = true;
1786 }
1787 } else if (UseXMMForObjInit) {
1788 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1789 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1790 }
1791
1792 #ifdef COMPILER2
// Default AlignVector to true only when unaligned vector loads/stores
// are NOT enabled on this CPU; an explicit -XX:AlignVector setting is
// respected (FLAG_IS_DEFAULT guard).
1793 if (FLAG_IS_DEFAULT(AlignVector)) {
1794 // Modern processors allow misaligned memory operations for vectors.
1795 AlignVector = !UseUnalignedLoadStores;
1796 }
1797 #endif // COMPILER2
1798
1799 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1800 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1801 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1802 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1803 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|