1769 FLAG_SET_DEFAULT(UseFastStosb, false);
1770 }
1771
1772 // For AMD Processors use XMM/YMM MOVDQU instructions
1773 // for Object Initialization as default
// NOTE(review): cpu_family() >= 0x19 is AMD family 19h — presumably Zen 3
// and later; confirm the intended cutoff. Only the DEFAULT is flipped: an
// explicit -XX:+UseFastStosb on the command line is left untouched.
1774 if (is_amd() && cpu_family() >= 0x19) {
1775 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1776 UseFastStosb = false;
1777 }
1778 }
1779
1780 #ifdef COMPILER2
// C2-compiled code only: on Intel parts where vectors wider than 16 bytes
// are enabled (MaxVectorSize > 16), also default UseFastStosb off —
// presumably so object initialization takes the wider XMM/YMM MOVDQU path
// instead of REP STOSB. TODO(review): confirm rationale against the change
// that introduced this branch.
1781 if (is_intel() && MaxVectorSize > 16) {
1782 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1783 UseFastStosb = false;
1784 }
1785 }
1786 #endif
1787
1788 // Use XMM/YMM MOVDQU instruction for Object Initialization
// Enable UseXMMForObjInit by default only when the REP-STOSB init path is
// off AND the CPU supports SSE2 plus unaligned load/stores. Otherwise, if
// the user forced the flag on without those prerequisites, warn and switch
// it back off.
// NOTE(review): the warning message names only SSE2/unaligned stores, but
// this branch is also reached when UseFastStosb is enabled — in that case
// the message does not state the real reason the feature was disabled.
1789 if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
1790 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1791 UseXMMForObjInit = true;
1792 }
1793 } else if (UseXMMForObjInit) {
1794 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1795 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1796 }
1797
1798 #ifdef COMPILER2
// Default vector alignment tracks unaligned load/store support: when the
// CPU handles misaligned vector memory operations, forced alignment is
// unnecessary, so AlignVector defaults to the inverse of that capability.
1799 if (FLAG_IS_DEFAULT(AlignVector)) {
1800 // Modern processors allow misaligned memory operations for vectors.
1801 AlignVector = !UseUnalignedLoadStores;
1802 }
1803 #endif // COMPILER2
1804
1805 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1806 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1807 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1808 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1809 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|
1769 FLAG_SET_DEFAULT(UseFastStosb, false);
1770 }
1771
1772 // For AMD Processors use XMM/YMM MOVDQU instructions
1773 // for Object Initialization as default
// NOTE(review): cpu_family() >= 0x19 is AMD family 19h — presumably Zen 3
// and later; confirm the intended cutoff. Only the DEFAULT is flipped: an
// explicit -XX:+UseFastStosb on the command line is left untouched.
1774 if (is_amd() && cpu_family() >= 0x19) {
1775 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1776 UseFastStosb = false;
1777 }
1778 }
1779
1780 #ifdef COMPILER2
// C2-compiled code only: on Intel parts where vectors wider than 16 bytes
// are enabled (MaxVectorSize > 16), also default UseFastStosb off —
// presumably so object initialization takes the wider XMM/YMM MOVDQU path
// instead of REP STOSB. TODO(review): confirm rationale against the change
// that introduced this branch.
1781 if (is_intel() && MaxVectorSize > 16) {
1782 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1783 UseFastStosb = false;
1784 }
1785 }
1786 #endif
1787
1788 // Use XMM/YMM MOVDQU instruction for Object Initialization
// Enable UseXMMForObjInit by default when the CPU supports SSE2 plus
// unaligned load/stores; in this variant the default no longer depends on
// UseFastStosb, so both flags may end up enabled at once — TODO(review):
// confirm the code that consumes these flags resolves that combination.
// If the user forced UseXMMForObjInit on without the CPU prerequisites,
// warn and switch it back off.
1789 if (UseSSE >= 2 && UseUnalignedLoadStores) {
1790 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1791 UseXMMForObjInit = true;
1792 }
1793 } else if (UseXMMForObjInit) {
1794 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1795 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1796 }
1797
1798 #ifdef COMPILER2
// Default vector alignment tracks unaligned load/store support: when the
// CPU handles misaligned vector memory operations, forced alignment is
// unnecessary, so AlignVector defaults to the inverse of that capability.
1799 if (FLAG_IS_DEFAULT(AlignVector)) {
1800 // Modern processors allow misaligned memory operations for vectors.
1801 AlignVector = !UseUnalignedLoadStores;
1802 }
1803 #endif // COMPILER2
1804
1805 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1806 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1807 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1808 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1809 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|