1718 FLAG_SET_DEFAULT(UseFastStosb, false);
1719 }
1720
1721 // For AMD Processors use XMM/YMM MOVDQU instructions
1722 // for Object Initialization as default
1723 if (is_amd() && cpu_family() >= 0x19) {  // NOTE(review): 0x19 presumably AMD Zen 3+ — confirm
1724 if (FLAG_IS_DEFAULT(UseFastStosb)) {  // only override when the user has not set the flag explicitly
1725 UseFastStosb = false;  // with fast-stosb off, the UseXMMForObjInit default below can kick in
1726 }
1727 }
1728
1729 #ifdef COMPILER2
1730 if (is_intel() && MaxVectorSize > 16) {  // Intel with >128-bit vectors available (C2 only)
1731 if (FLAG_IS_DEFAULT(UseFastStosb)) {  // respect an explicit user setting
1732 UseFastStosb = false;  // prefer XMM/YMM moves for object init (see block below)
1733 }
1734 }
1735 #endif
1736
1737 // Use XMM/YMM MOVDQU instruction for Object Initialization
1738 if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {  // prerequisites: no fast-stosb, SSE2, unaligned ld/st
1739 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1740 UseXMMForObjInit = true;  // default on only when all prerequisites hold
1741 }
1742 } else if (UseXMMForObjInit) {  // user forced the flag on without the prerequisites
1743 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1744 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1745 }
1746
1747 #ifdef COMPILER2
1748 if (FLAG_IS_DEFAULT(AlignVector)) {
1749 // Modern processors allow misaligned memory operations for vectors.
1750 AlignVector = !UseUnalignedLoadStores;  // require alignment only when unaligned vector ld/st are unavailable
1751 }
1752 #endif // COMPILER2
1753
1754 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1755 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1756 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1757 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1758 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|
1718 FLAG_SET_DEFAULT(UseFastStosb, false);
1719 }
1720
1721 // For AMD Processors use XMM/YMM MOVDQU instructions
1722 // for Object Initialization as default
1723 if (is_amd() && cpu_family() >= 0x19) {  // NOTE(review): 0x19 presumably AMD Zen 3+ — confirm
1724 if (FLAG_IS_DEFAULT(UseFastStosb)) {  // only override when the user has not set the flag explicitly
1725 UseFastStosb = false;  // with fast-stosb off, the UseXMMForObjInit default below can kick in
1726 }
1727 }
1728
1729 #ifdef COMPILER2
1730 if (is_intel() && MaxVectorSize > 16) {  // Intel with >128-bit vectors available (C2 only)
1731 if (FLAG_IS_DEFAULT(UseFastStosb)) {  // respect an explicit user setting
1732 UseFastStosb = false;  // prefer XMM/YMM moves for object init (see block below)
1733 }
1734 }
1735 #endif
1736
1737 // Use XMM/YMM MOVDQU instruction for Object Initialization
1738 if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {  // FIX: restore !UseFastStosb — the flag must not default on while fast-stosb is active (matches the identical block earlier in the file)
1739 if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
1740 UseXMMForObjInit = true;  // default on only when all prerequisites hold
1741 }
1742 } else if (UseXMMForObjInit) {  // user forced the flag on without the prerequisites
1743 warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
1744 FLAG_SET_DEFAULT(UseXMMForObjInit, false);
1745 }
1746
1747 #ifdef COMPILER2
1748 if (FLAG_IS_DEFAULT(AlignVector)) {
1749 // Modern processors allow misaligned memory operations for vectors.
1750 AlignVector = !UseUnalignedLoadStores;  // require alignment only when unaligned vector ld/st are unavailable
1751 }
1752 #endif // COMPILER2
1753
1754 if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
1755 if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
1756 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
1757 } else if (!supports_sse() && supports_3dnow_prefetch()) {
1758 FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
|