    ciField* field = lf->field();
    if (field->is_static_constant()) {
      // Constant field loads are usually folded during parsing.
      // But it doesn't happen with PatchALot, ScavengeRootsInCode < 2, or when
      // the holder class is being initialized during parsing (for static fields).
      ciObject* c = field->constant_value().as_object();
      if (!c->is_null_object()) {
        set_constant(c->as_array()->length());
      }
    }
  }
}

void Canonicalizer::do_LoadIndexed (LoadIndexed* x) {
  StableArrayConstant* array = x->array()->type()->as_StableArrayConstant();
  IntConstant* index = x->index()->type()->as_IntConstant();

  assert(array == NULL || FoldStableValues, "not enabled");

  // Constant fold loads from stable arrays.
  if (!x->should_profile() && !x->mismatched() && array != NULL && index != NULL) {
    jint idx = index->value();
    if (idx < 0 || idx >= array->value()->length()) {
      // Leave the load as is. The range check will handle it.
      return;
    }

    ciConstant field_val = array->value()->element_value(idx);
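    // Only non-default elements are folded: a stable array element is treated as
    // constant once it holds a non-null/non-zero value, while a default (null/zero)
    // element may still be written later and therefore cannot be constant-folded.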
    if (!field_val.is_null_or_zero()) {
      jint dimension = array->dimension();
      assert(dimension <= array->value()->array_type()->dimension(), "inconsistent info");
      ValueType* value = NULL;
      if (dimension > 1) {
        // Preserve information about the dimension for the element.
        assert(field_val.as_object()->is_array(), "not an array");
        value = new StableArrayConstant(field_val.as_object()->as_array(), dimension - 1);
      } else {
        assert(dimension == 1, "sanity");
        value = as_ValueType(field_val);
      }
      set_canonical(new Constant(value));

// ...

    if (op2 && op2->op() == Bytecodes::_iand && op2->y()->type()->is_constant()) {
      jint safebits = 0;
      jint mask = op2->y()->type()->as_IntConstant()->value();
      switch (x->op()) {
      case Bytecodes::_i2b: safebits = 0x7f;   break;
      case Bytecodes::_i2s: safebits = 0x7fff; break;
      case Bytecodes::_i2c: safebits = 0xffff; break;
      default             : break;
      }
      // When casting a masked integer to a smaller signed type, if
      // the mask doesn't include the sign bit the cast isn't needed.
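      // Example: "(v & 0x7f)" is already in [0, 127], so a following i2b cannot change
      // the value and the conversion is dropped. "(v & 0xff)" may still set the byte's
      // sign bit (0xf0 becomes -16 after i2b), so that conversion has to stay.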
      if (safebits && (mask & ~safebits) == 0) {
        set_canonical(x->value());
      }
    }
  }

}

void Canonicalizer::do_NullCheck (NullCheck* x) {
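  // A null check is redundant if the value is known to be non-null: a freshly
  // allocated object can never be null, and a non-null object constant speaks for itself.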
  if (x->obj()->as_NewArray() != NULL || x->obj()->as_NewInstance() != NULL || x->obj()->as_NewInlineTypeInstance()) {
    set_canonical(x->obj());
  } else {
    Constant* con = x->obj()->as_Constant();
    if (con) {
      ObjectType* c = con->type()->as_ObjectType();
      if (c && c->is_loaded()) {
        ObjectConstant* oc = c->as_ObjectConstant();
        if (!oc || !oc->value()->is_null_object()) {
          set_canonical(con);
        }
      }
    }
  }
}

void Canonicalizer::do_TypeCast (TypeCast* x) {}
void Canonicalizer::do_Invoke (Invoke* x) {}
void Canonicalizer::do_NewInstance (NewInstance* x) {}
void Canonicalizer::do_NewInlineTypeInstance(NewInlineTypeInstance* x) {}
void Canonicalizer::do_NewTypeArray (NewTypeArray* x) {}
void Canonicalizer::do_NewObjectArray (NewObjectArray* x) {}
void Canonicalizer::do_NewMultiArray (NewMultiArray* x) {}
void Canonicalizer::do_Deoptimize (Deoptimize* x) {}
void Canonicalizer::do_CheckCast (CheckCast* x) {
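  // The cast is redundant if the object's static type is already a (non-interface)
  // subtype of the target klass and its null-freeness is compatible; casting null
  // is also a no-op unless the target klass is null-free.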
  if (x->klass()->is_loaded()) {
    Value obj = x->obj();
    ciType* klass = obj->exact_type();
    if (klass == NULL) {
      klass = obj->declared_type();
    }
    if (klass != NULL && klass->is_loaded()) {
      bool is_interface = klass->is_instance_klass() &&
                          klass->as_instance_klass()->is_interface();
      // Interface casts can't be statically optimized away since the verifier doesn't
      // enforce interface types in bytecode.
      if (!is_interface && klass->is_subtype_of(x->klass()) && (!x->is_null_free() || obj->is_null_free())) {
        assert(!x->klass()->is_inlinetype() || x->klass() == klass, "Inline klasses can't have subtypes");
        set_canonical(obj);
        return;
      }
    }
    // checkcast of null returns null for non-null-free klasses
    if (!x->is_null_free() && obj->is_null_obj()) {
      set_canonical(obj);
    }
  }
}
void Canonicalizer::do_InstanceOf (InstanceOf* x) {
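  // If the object was just allocated, its exact type is known and the instanceof
  // result folds to a constant; instanceof of the null constant is always false.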
  if (x->klass()->is_loaded()) {
    Value obj = x->obj();
    ciType* exact = obj->exact_type();
    if (exact != NULL && exact->is_loaded() && (obj->as_NewInstance() || obj->as_NewArray() || obj->as_NewInlineTypeInstance())) {
      set_constant(exact->is_subtype_of(x->klass()) ? 1 : 0);
      return;
    }
    // instanceof null returns false
    if (obj->as_Constant() && obj->is_null_obj()) {
      set_constant(0);
    }
  }

}
void Canonicalizer::do_MonitorEnter (MonitorEnter* x) {}
void Canonicalizer::do_MonitorExit (MonitorExit* x) {}
void Canonicalizer::do_BlockBegin (BlockBegin* x) {}
void Canonicalizer::do_Goto (Goto* x) {}


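// Helper: evaluates an If condition on two constant jlong operands.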
static bool is_true(jlong x, If::Condition cond, jlong y) {
  switch (cond) {
  case If::eql: return x == y;
  case If::neq: return x != y;
  case If::lss: return x < y;
  case If::leq: return x <= y;
  case If::gtr: return x > y;
  case If::geq: return x >= y;
  default:

// ...

          do_If(canon);
        } else {
          if (compilation()->profile_branches() || compilation()->is_profiling()) {
            // TODO: If profiling, leave floating point comparisons unoptimized.
            // We currently do not support profiling of the unordered case.
            switch(cmp->op()) {
            case Bytecodes::_fcmpl: case Bytecodes::_fcmpg:
            case Bytecodes::_dcmpl: case Bytecodes::_dcmpg:
              set_canonical(x);
              return;
            default:
              break;
            }
          }
          set_bci(cmp->state_before()->bci());
          set_canonical(canon);
        }
      }
    }
  } else if (rt == objectNull &&
             (l->as_NewInstance() || l->as_NewArray() || l->as_NewInlineTypeInstance() ||
             (l->as_Local() && l->as_Local()->is_receiver()))) {
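    // The left operand is a fresh allocation or the receiver, so it is known to be
    // non-null and the comparison against null is decided here: == takes the false
    // successor, != takes the true successor.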
    if (x->cond() == Instruction::eql) {
      BlockBegin* sux = x->fsux();
      set_canonical(new Goto(sux, x->state_before(), is_safepoint(x, sux)));
    } else {
      assert(x->cond() == Instruction::neq, "only other valid case");
      BlockBegin* sux = x->tsux();
      set_canonical(new Goto(sux, x->state_before(), is_safepoint(x, sux)));
    }
  }
}


void Canonicalizer::do_TableSwitch(TableSwitch* x) {
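  // With a constant tag the matching successor is known, so the whole switch
  // collapses into a direct Goto; values outside the lo/hi key range take the
  // default successor.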
  if (x->tag()->type()->is_constant()) {
    int v = x->tag()->type()->as_IntConstant()->value();
    BlockBegin* sux = x->default_sux();
    if (v >= x->lo_key() && v <= x->hi_key()) {
      sux = x->sux_at(v - x->lo_key());
    }

// ...

      if (v == x->key_at(i)) {
        sux = x->sux_at(i);
      }
    }
    set_canonical(new Goto(sux, x->state_before(), is_safepoint(x, sux)));
  }
}


void Canonicalizer::do_Return (Return* x) {}
void Canonicalizer::do_Throw (Throw* x) {}
void Canonicalizer::do_Base (Base* x) {}
void Canonicalizer::do_OsrEntry (OsrEntry* x) {}
void Canonicalizer::do_ExceptionObject(ExceptionObject* x) {}
void Canonicalizer::do_RoundFP (RoundFP* x) {}
void Canonicalizer::do_UnsafeGet (UnsafeGet* x) {}
void Canonicalizer::do_UnsafePut (UnsafePut* x) {}
void Canonicalizer::do_UnsafeGetAndSet(UnsafeGetAndSet* x) {}
void Canonicalizer::do_ProfileCall (ProfileCall* x) {}
void Canonicalizer::do_ProfileReturnType(ProfileReturnType* x) {}
void Canonicalizer::do_ProfileInvoke (ProfileInvoke* x) {}
void Canonicalizer::do_ProfileACmpTypes (ProfileACmpTypes* x) {}
void Canonicalizer::do_RuntimeCall (RuntimeCall* x) {}
void Canonicalizer::do_RangeCheckPredicate(RangeCheckPredicate* x) {}
#ifdef ASSERT
void Canonicalizer::do_Assert (Assert* x) {}
#endif
void Canonicalizer::do_MemBar (MemBar* x) {}