// NOTE(review): numbered listing fragment from the interior of what appears to be
// VM_Version::get_processor_features() (HotSpot vm_version_x86.cpp). The enclosing
// function begins before and ends after this excerpt (line 1793 closes an unseen
// 'if'; lines 1828-1832 are truncated mid-statement), so only comments are added.
1792 FLAG_SET_DEFAULT(UseFastStosb, false);
1793 }
1794
1795 // For AMD Processors use XMM/YMM MOVDQU instructions
1796 // for Object Initialization as default
// Family 0x19 is AMD Zen 3 and later; prefer vector stores over 'rep stosb' there.
// NOTE(review): this assigns the flag variable directly rather than using
// FLAG_SET_DEFAULT — presumably intentional so the flag's "default" origin is kept
// for later FLAG_IS_DEFAULT checks; confirm against JVM flag-origin semantics.
1797 if (is_amd() && cpu_family() >= 0x19) {
1798 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1799 UseFastStosb = false;
1800 }
1801 }
1802
1803 #ifdef COMPILER2
// With wide vectors enabled (> 16-byte, i.e. AVX2+), Intel also defaults away
// from 'rep stosb'; same direct-assignment pattern as the AMD case above.
1804 if (is_intel() && MaxVectorSize > 16) {
1805 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1806 UseFastStosb = false;
1807 }
1808 }
1809 #endif
1810
1811 // Use XMM/YMM MOVDQU instruction for Object Initialization
// XMM-based object init is only defaulted on when fast-stosb is off AND unaligned
// load/stores are available; an explicit UseXMMForObjInit without that support is
// rejected with a warning and reset.
1812 if (!UseFastStosb && UseUnalignedLoadStores) {
1813 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1814 UseXMMForObjInit = true;
1815 }
1816 } else if (UseXMMForObjInit) {
1817 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1818 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1819 }
1820
1821 #ifdef COMPILER2
1822 if (FLAG_IS_DEFAULT(AlignVector)) {
1823 // Modern processors allow misaligned memory operations for vectors.
1824 AlignVector = !UseUnalignedLoadStores;
1825 }
1826 #endif // COMPILER2
1827
// NOTE(review): inside a FLAG_IS_DEFAULT guard the flag still holds its default
// value, so 'AllocatePrefetchInstr == 3' can only be true if the declared default
// is 3 — verify this guard/condition pairing against the flag's declaration
// (excerpt is truncated here, so the closing logic is not visible).
1828 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1829 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1830 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1831 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1832 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|
// NOTE(review): second copy of the same lines 1792-1832 listing (the '|' markers
// delimit a before/after paste). It differs from the first copy ONLY at line 1812:
// the '!UseFastStosb &&' term is dropped, so UseXMMForObjInit is defaulted on
// whenever unaligned load/stores are supported, regardless of UseFastStosb —
// presumably the "after" side of a patch; confirm which version is current.
1792 FLAG_SET_DEFAULT(UseFastStosb, false);
1793 }
1794
1795 // For AMD Processors use XMM/YMM MOVDQU instructions
1796 // for Object Initialization as default
// Family 0x19 is AMD Zen 3 and later; prefer vector stores over 'rep stosb' there.
1797 if (is_amd() && cpu_family() >= 0x19) {
1798 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1799 UseFastStosb = false;
1800 }
1801 }
1802
1803 #ifdef COMPILER2
// With wide vectors enabled (> 16-byte, i.e. AVX2+), Intel also defaults away
// from 'rep stosb'.
1804 if (is_intel() && MaxVectorSize > 16) {
1805 if (FLAG_IS_DEFAULT(UseFastStosb)) {
1806 UseFastStosb = false;
1807 }
1808 }
1809 #endif
1810
1811 // Use XMM/YMM MOVDQU instruction for Object Initialization
// In this copy the gate is unaligned load/store support alone; an explicit
// UseXMMForObjInit without that support is rejected with a warning and reset.
1812 if (UseUnalignedLoadStores) {
1813 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1814 UseXMMForObjInit = true;
1815 }
1816 } else if (UseXMMForObjInit) {
1817 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1818 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1819 }
1820
1821 #ifdef COMPILER2
1822 if (FLAG_IS_DEFAULT(AlignVector)) {
1823 // Modern processors allow misaligned memory operations for vectors.
1824 AlignVector = !UseUnalignedLoadStores;
1825 }
1826 #endif // COMPILER2
1827
// NOTE(review): same concern as the first copy — 'AllocatePrefetchInstr == 3'
// inside a FLAG_IS_DEFAULT guard only fires if the flag's declared default is 3;
// verify against the flag declaration (excerpt truncated below).
1828 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1829 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1830 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1831 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1832 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|