src/hotspot/share/opto/parse2.cpp

Old version:

  37 #include "opto/divnode.hpp"
  38 #include "opto/idealGraphPrinter.hpp"
  39 #include "opto/matcher.hpp"
  40 #include "opto/memnode.hpp"
  41 #include "opto/mulnode.hpp"
  42 #include "opto/opaquenode.hpp"
  43 #include "opto/parse.hpp"
  44 #include "opto/runtime.hpp"
  45 #include "runtime/deoptimization.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 
  48 #ifndef PRODUCT
  49 extern int explicit_null_checks_inserted,
  50            explicit_null_checks_elided;
  51 #endif
  52 
  53 //---------------------------------array_load----------------------------------
  54 void Parse::array_load(BasicType bt) {
  55   const Type* elemtype = Type::TOP;
  56   bool big_val = bt == T_DOUBLE || bt == T_LONG;
  57   Node* adr = array_addressing(bt, 0, &elemtype);
  58   if (stopped())  return;     // guaranteed null or range check
  59 
  60   pop();                      // index (already used)
  61   Node* array = pop();        // the array itself
  62 
  63   if (elemtype == TypeInt::BOOL) {
  64     bt = T_BOOLEAN;
  65   } else if (bt == T_OBJECT) {
  66     elemtype = _gvn.type(array)->is_aryptr()->elem()->make_oopptr();
  67   }
  68 
  69   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
  70 
  71   Node* ld = access_load_at(array, adr, adr_type, elemtype, bt,
  72                             IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
  73   if (big_val) {
  74     push_pair(ld);
  75   } else {
  76     push(ld);
  77   }
  78 }
  79 
  80 
  81 //--------------------------------array_store----------------------------------
  82 void Parse::array_store(BasicType bt) {
  83   const Type* elemtype = Type::TOP;
  84   bool big_val = bt == T_DOUBLE || bt == T_LONG;
  85   Node* adr = array_addressing(bt, big_val ? 2 : 1, &elemtype);
  86   if (stopped())  return;     // guaranteed null or range check
  87   if (bt == T_OBJECT) {
  88     array_store_check();
  89   }
  90   Node* val;                  // Oop to store
  91   if (big_val) {
  92     val = pop_pair();
  93   } else {
  94     val = pop();
  95   }
  96   pop();                      // index (already used)
  97   Node* array = pop();        // the array itself
  98 
  99   if (elemtype == TypeInt::BOOL) {
 100     bt = T_BOOLEAN;
 101   } else if (bt == T_OBJECT) {
 102     elemtype = _gvn.type(array)->is_aryptr()->elem()->make_oopptr();
 103   }
 104 
 105   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 106 
 107   access_store_at(control(), array, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 108 }
 109 
 110 
 111 //------------------------------array_addressing-------------------------------
 112 // Pull array and index from the stack.  Compute pointer-to-element.
 113 Node* Parse::array_addressing(BasicType type, int vals, const Type* *result2) {
 114   Node *idx   = peek(0+vals);   // Get from stack without popping
 115   Node *ary   = peek(1+vals);   // in case of exception
 116 
 117   // Null check the array base, with correct stack contents
 118   ary = null_check(ary, T_ARRAY);
 119   // Compile-time detect of null-exception?
 120   if (stopped())  return top();
 121 
 122   const TypeAryPtr* arytype  = _gvn.type(ary)->is_aryptr();
 123   const TypeInt*    sizetype = arytype->size();
 124   const Type*       elemtype = arytype->elem();
 125 
 126   if (UseUniqueSubclasses && result2 != NULL) {
 127     const Type* el = elemtype->make_ptr();
 128     if (el && el->isa_instptr()) {
 129       const TypeInstPtr* toop = el->is_instptr();
 130       if (toop->klass()->as_instance_klass()->unique_concrete_subklass()) {
 131         // If we load from "AbstractClass[]" we must see "ConcreteSubClass".
 132         const Type* subklass = Type::get_const_type(toop->klass());
 133         elemtype = subklass->join_speculative(el);


 187       if (C->allow_range_check_smearing()) {
 188         // Do not use builtin_throw, since range checks are sometimes
 189         // made more stringent by an optimistic transformation.
 190         // This creates "tentative" range checks at this point,
 191         // which are not guaranteed to throw exceptions.
 192         // See IfNode::Ideal, is_range_check, adjust_check.
 193         uncommon_trap(Deoptimization::Reason_range_check,
 194                       Deoptimization::Action_make_not_entrant,
 195                       NULL, "range_check");
 196       } else {
 197         // If we have already recompiled with the range-check-widening
 198         // heroic optimization turned off, then we must really be throwing
 199         // range check exceptions.
 200         builtin_throw(Deoptimization::Reason_range_check, idx);
 201       }
 202     }
 203   }
 204   // Check for always knowing you are throwing a range-check exception
 205   if (stopped())  return top();
 206 

 207   // Make array address computation control dependent to prevent it
 208   // from floating above the range check during loop optimizations.
 209   Node* ptr = array_element_address(ary, idx, type, sizetype, control());
 210 
 211   if (result2 != NULL)  *result2 = elemtype;
 212 
 213   assert(ptr != top(), "top should go hand-in-hand with stopped");
 214 
 215   return ptr;
 216 }
 217 
 218 
 219 // returns IfNode
 220 IfNode* Parse::jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask, float prob, float cnt) {
 221   Node   *cmp = _gvn.transform(new CmpINode(a, b)); // two cases: shiftcount > 32 and shiftcount <= 32
 222   Node   *tst = _gvn.transform(new BoolNode(cmp, mask));
 223   IfNode *iff = create_and_map_if(control(), tst, prob, cnt);
 224   return iff;
 225 }
 226 


2732       b = null_check_oop(b, &null_ctl, true, true, true);
2733       assert(null_ctl->is_top(), "no null control here");
2734       dec_sp(1);
2735     } else if (_gvn.type(b)->speculative_always_null() &&
2736                !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
2737       inc_sp(1);
2738       b = null_assert(b);
2739       dec_sp(1);
2740     }
2741     c = _gvn.transform( new CmpPNode(b, a) );
2742     do_ifnull(btest, c);
2743     break;
2744 
2745   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2746   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2747   handle_if_acmp:
2748     // If this is a backwards branch in the bytecodes, add Safepoint
2749     maybe_add_safepoint(iter().get_dest());
2750     a = pop();
2751     b = pop();

2752     c = _gvn.transform( new CmpPNode(b, a) );
2753     c = optimize_cmp_with_klass(c);
2754     do_if(btest, c);
2755     break;
2756 
2757   case Bytecodes::_ifeq: btest = BoolTest::eq; goto handle_ifxx;
2758   case Bytecodes::_ifne: btest = BoolTest::ne; goto handle_ifxx;
2759   case Bytecodes::_iflt: btest = BoolTest::lt; goto handle_ifxx;
2760   case Bytecodes::_ifle: btest = BoolTest::le; goto handle_ifxx;
2761   case Bytecodes::_ifgt: btest = BoolTest::gt; goto handle_ifxx;
2762   case Bytecodes::_ifge: btest = BoolTest::ge; goto handle_ifxx;
2763   handle_ifxx:
2764     // If this is a backwards branch in the bytecodes, add Safepoint
2765     maybe_add_safepoint(iter().get_dest());
2766     a = _gvn.intcon(0);
2767     b = pop();
2768     c = _gvn.transform( new CmpINode(b, a) );
2769     do_if(btest, c);
2770     break;
2771 

New version:

  37 #include "opto/divnode.hpp"
  38 #include "opto/idealGraphPrinter.hpp"
  39 #include "opto/matcher.hpp"
  40 #include "opto/memnode.hpp"
  41 #include "opto/mulnode.hpp"
  42 #include "opto/opaquenode.hpp"
  43 #include "opto/parse.hpp"
  44 #include "opto/runtime.hpp"
  45 #include "runtime/deoptimization.hpp"
  46 #include "runtime/sharedRuntime.hpp"
  47 
  48 #ifndef PRODUCT
  49 extern int explicit_null_checks_inserted,
  50            explicit_null_checks_elided;
  51 #endif
  52 
  53 //---------------------------------array_load----------------------------------
  54 void Parse::array_load(BasicType bt) {
  55   const Type* elemtype = Type::TOP;
  56   bool big_val = bt == T_DOUBLE || bt == T_LONG;
  57   Node* adr = array_addressing(bt, 0, false, &elemtype);
  58   if (stopped())  return;     // guaranteed null or range check
  59 
  60   pop();                      // index (already used)
  61   Node* array = pop();        // the array itself
  62 
  63   if (elemtype == TypeInt::BOOL) {
  64     bt = T_BOOLEAN;
  65   } else if (bt == T_OBJECT) {
  66     elemtype = _gvn.type(array)->is_aryptr()->elem()->make_oopptr();
  67   }
  68 
  69   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
  70 
  71   Node* ld = access_load_at(array, adr, adr_type, elemtype, bt,
  72                             IN_HEAP | IS_ARRAY | C2_CONTROL_DEPENDENT_LOAD);
  73   if (big_val) {
  74     push_pair(ld);
  75   } else {
  76     push(ld);
  77   }
  78 }
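
For context on the stack discipline: array_load runs with the index and array reference still on the expression stack (array_addressing only peeks at them, so a trap taken inside it sees the correct stack), and pops both afterwards before pushing the loaded value, as a two-slot pair for long and double. A minimal standalone model of that discipline, with an invented Frame type rather than HotSpot's real parser state:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Toy expression stack; Frame and its methods are invented for
    // illustration and are not HotSpot types.
    struct Frame {
        std::vector<int64_t> stack;
        int64_t pop()           { int64_t v = stack.back(); stack.pop_back(); return v; }
        void    push(int64_t v) { stack.push_back(v); }
    };

    // iaload: ..., arrayref, index  ->  ..., value
    int32_t iaload(Frame& f, const std::vector<int32_t>& heap_array) {
        int64_t idx = f.pop();              // index is on top of the stack
        (void)f.pop();                      // arrayref; the array itself is
                                            // modeled by heap_array here
        assert(idx >= 0 && (std::size_t)idx < heap_array.size());  // range check
        int32_t v = heap_array[(std::size_t)idx];
        f.push(v);                          // result replaces the two inputs
        return v;
    }

    int main() {
        Frame f;
        f.push(0);                          // dummy arrayref slot
        f.push(1);                          // index
        std::vector<int32_t> arr{10, 20, 30};
        std::printf("%d\n", iaload(f, arr)); // prints 20
    }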
  79 
  80 
  81 //--------------------------------array_store----------------------------------
  82 void Parse::array_store(BasicType bt) {
  83   const Type* elemtype = Type::TOP;
  84   bool big_val = bt == T_DOUBLE || bt == T_LONG;
  85   Node* adr = array_addressing(bt, big_val ? 2 : 1, true, &elemtype);
  86   if (stopped())  return;     // guaranteed null or range check
  87   if (bt == T_OBJECT) {
  88     array_store_check();
  89   }
  90   Node* val;                  // Oop to store
  91   if (big_val) {
  92     val = pop_pair();
  93   } else {
  94     val = pop();
  95   }
  96   pop();                      // index (already used)
  97   Node* array = pop();        // the array itself
  98 
  99   if (elemtype == TypeInt::BOOL) {
 100     bt = T_BOOLEAN;
 101   } else if (bt == T_OBJECT) {
 102     elemtype = _gvn.type(array)->is_aryptr()->elem()->make_oopptr();
 103   }
 104 
 105   const TypeAryPtr* adr_type = TypeAryPtr::get_array_body_type(bt);
 106 
 107   access_store_at(control(), array, adr, adr_type, val, elemtype, bt, MO_UNORDERED | IN_HEAP | IS_ARRAY);
 108 }
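
The array_store_check() above guards Java's covariant arrays: an aastore into an Object[] that is dynamically a String[] must throw ArrayStoreException, so the check is against the array's runtime element type, not the static one. A toy sketch of that dynamic check, with an invented ObjArray type and dynamic_cast standing in for C2's subtype check:

    #include <cstdio>
    #include <stdexcept>

    struct Base           { virtual ~Base() = default; };
    struct Derived : Base {};
    struct Other   : Base {};

    // Toy object array that remembers its dynamic element type as a
    // subtype predicate, standing in for the array klass's element klass.
    struct ObjArray {
        bool (*is_instance)(const Base*);
        Base* slot = nullptr;

        void store(Base* v) {               // models the aastore check
            if (v != nullptr && !is_instance(v))
                throw std::runtime_error("ArrayStoreException (modeled)");
            slot = v;
        }
    };

    int main() {
        // A "Derived[]" viewed through a supertype reference.
        ObjArray a{ [](const Base* p) { return dynamic_cast<const Derived*>(p) != nullptr; } };
        Derived d;
        Other   o;
        a.store(&d);                        // fine: Derived is the element type
        try { a.store(&o); }                // rejected at run time
        catch (const std::exception& e) { std::puts(e.what()); }
    }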
 109 
 110 
 111 //------------------------------array_addressing-------------------------------
 112 // Pull array and index from the stack.  Compute pointer-to-element.
 113 Node* Parse::array_addressing(BasicType type, int vals, bool is_store, const Type* *result2) {
 114   Node *idx   = peek(0+vals);   // Get from stack without popping
 115   Node *ary   = peek(1+vals);   // in case of exception
 116 
 117   // Null check the array base, with correct stack contents
 118   ary = null_check(ary, T_ARRAY);
 119   // Compile-time detect of null-exception?
 120   if (stopped())  return top();
 121 
 122   const TypeAryPtr* arytype  = _gvn.type(ary)->is_aryptr();
 123   const TypeInt*    sizetype = arytype->size();
 124   const Type*       elemtype = arytype->elem();
 125 
 126   if (UseUniqueSubclasses && result2 != NULL) {
 127     const Type* el = elemtype->make_ptr();
 128     if (el && el->isa_instptr()) {
 129       const TypeInstPtr* toop = el->is_instptr();
 130       if (toop->klass()->as_instance_klass()->unique_concrete_subklass()) {
 131         // If we load from "AbstractClass[]" we must see "ConcreteSubClass".
 132         const Type* subklass = Type::get_const_type(toop->klass());
 133         elemtype = subklass->join_speculative(el);


 187       if (C->allow_range_check_smearing()) {
 188         // Do not use builtin_throw, since range checks are sometimes
 189         // made more stringent by an optimistic transformation.
 190         // This creates "tentative" range checks at this point,
 191         // which are not guaranteed to throw exceptions.
 192         // See IfNode::Ideal, is_range_check, adjust_check.
 193         uncommon_trap(Deoptimization::Reason_range_check,
 194                       Deoptimization::Action_make_not_entrant,
 195                       NULL, "range_check");
 196       } else {
 197         // If we have already recompiled with the range-check-widening
 198         // heroic optimization turned off, then we must really be throwing
 199         // range check exceptions.
 200         builtin_throw(Deoptimization::Reason_range_check, idx);
 201       }
 202     }
 203   }
 204   // Check for always knowing you are throwing a range-check exception
 205   if (stopped())  return top();
 206 
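The "optimistic transformation" referred to above is range-check smearing: several checks dominated by one point are widened into a single check. The widened check can fail for an index that the first original check would have accepted, so it cannot throw directly; it has to deoptimize so the interpreter can replay the accesses and raise the exception (or not) at the right bytecode. A schematic illustration in plain C++, with invented helpers throw_range_check and deoptimize:

    #include <cstdio>
    #include <cstdlib>

    static void use(int v) { std::printf("%d\n", v); }
    [[noreturn]] static void throw_range_check(int i) {   // models AIOOBE
        std::fprintf(stderr, "ArrayIndexOutOfBounds: %d\n", i);
        std::exit(1);
    }
    [[noreturn]] static void deoptimize() {               // invented stand-in for
        std::fprintf(stderr, "deopt\n");                  // re-entering the interpreter
        std::exit(2);
    }

    // Before smearing: each access carries its own exact check and may throw.
    void before(const int* a, int i, int len) {
        if (i < 0     || i     >= len) throw_range_check(i);
        use(a[i]);
        if (i + 2 < 0 || i + 2 >= len) throw_range_check(i + 2);
        use(a[i + 2]);
    }

    // After smearing: one widened check dominates both accesses. For
    // i == len - 2 it fails even though a[i] alone was in bounds, so
    // throwing here would be observably wrong; the code must deoptimize
    // and let the interpreter redo the accesses one at a time.
    void after(const int* a, int i, int len) {
        if (i < 0 || i + 2 >= len) deoptimize();
        use(a[i]);
        use(a[i + 2]);
    }

    int main() {
        int a[4] = {1, 2, 3, 4};
        before(a, 1, 4);                                  // prints 2 and 4
        after(a, 1, 4);
    }
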
 207   if (is_store) {
 208     ary = access_resolve_for_write(ary);
 209   } else {
 210     ary = access_resolve_for_read(ary);
 211   }
 212 
 213   // Make array address computation control dependent to prevent it
 214   // from floating above the range check during loop optimizations.
 215   Node* ptr = array_element_address(ary, idx, type, sizetype, control());
 216 
 217   if (result2 != NULL)  *result2 = elemtype;
 218 
 219   assert(ptr != top(), "top should go hand-in-hand with stopped");
 220 
 221   return ptr;
 222 }
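
The new is_store flag and the access_resolve_for_read/write calls are there for collectors like Shenandoah, where an object may briefly exist as both a from-space and a to-space copy: the array base has to be resolved to one canonical copy before the element address is computed, and a write must target the copy that survives evacuation. A standalone sketch of the Brooks-style forwarding pointer Shenandoah used at the time; the Obj layout and the resolve helper are illustrative, not the VM's:

    #include <cstdio>
    #include <cstring>

    // Toy object with a Brooks-style forwarding pointer in its header:
    // it names the current copy of the object (itself, if not yet moved).
    struct Obj {
        Obj* fwd;
        int  payload[4];
    };

    // Barrier core: follow the forwarding pointer to the canonical copy.
    // (Shenandoah's real barriers do much more; this is only the idea.)
    static Obj* resolve(Obj* p) { return p ? p->fwd : nullptr; }

    int main() {
        Obj from{nullptr, {1, 2, 3, 4}};
        Obj to;
        std::memcpy(&to, &from, sizeof(Obj));   // GC evacuates the object
        to.fwd   = &to;                         // to-space copy points to itself
        from.fwd = &to;                         // from-space copy forwards

        Obj* stale = &from;                     // a stale reference in the heap
        // Element address computation must start from the resolved copy,
        // or the store below would land in the doomed from-space copy.
        resolve(stale)->payload[2] = 99;
        std::printf("%d\n", to.payload[2]);     // 99: the live copy was updated
    }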
 223 
 224 
 225 // returns IfNode
 226 IfNode* Parse::jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask, float prob, float cnt) {
 227   Node   *cmp = _gvn.transform(new CmpINode(a, b)); // two cases: shiftcount > 32 and shiftcount <= 32
 228   Node   *tst = _gvn.transform(new BoolNode(cmp, mask));
 229   IfNode *iff = create_and_map_if(control(), tst, prob, cnt);
 230   return iff;
 231 }
 232 


2738       b = null_check_oop(b, &null_ctl, true, true, true);
2739       assert(null_ctl->is_top(), "no null control here");
2740       dec_sp(1);
2741     } else if (_gvn.type(b)->speculative_always_null() &&
2742                !too_many_traps(Deoptimization::Reason_speculate_null_assert)) {
2743       inc_sp(1);
2744       b = null_assert(b);
2745       dec_sp(1);
2746     }
2747     c = _gvn.transform( new CmpPNode(b, a) );
2748     do_ifnull(btest, c);
2749     break;
2750 
2751   case Bytecodes::_if_acmpeq: btest = BoolTest::eq; goto handle_if_acmp;
2752   case Bytecodes::_if_acmpne: btest = BoolTest::ne; goto handle_if_acmp;
2753   handle_if_acmp:
2754     // If this is a backwards branch in the bytecodes, add Safepoint
2755     maybe_add_safepoint(iter().get_dest());
2756     a = pop();
2757     b = pop();
2758 #if INCLUDE_SHENANDOAHGC
2759     if (UseShenandoahGC && ShenandoahAcmpBarrier) {
2760       a = access_resolve_for_write(a);
2761       b = access_resolve_for_write(b);
2762     }
2763 #endif
2764     c = _gvn.transform( new CmpPNode(b, a) );
2765     c = optimize_cmp_with_klass(c);
2766     do_if(btest, c);
2767     break;
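
The #if INCLUDE_SHENANDOAHGC block resolves both acmp operands for a related reason: while two physical copies of one object are live during evacuation, comparing raw oops could report two references to the same object as unequal, breaking the semantics of Java's ==. (The patch resolves both operands for write, which pins each one to its to-space copy.) Continuing the toy forwarding model from the previous sketch:

    #include <cassert>

    struct Obj { Obj* fwd; int payload; };          // as in the sketch above
    static Obj* resolve(Obj* p) { return p ? p->fwd : nullptr; }

    int main() {
        Obj to{nullptr, 7};
        to.fwd = &to;                               // evacuated, canonical copy
        Obj from{&to, 7};                           // stale original, forwards to it

        Obj* a = &from;                             // two Java references to the
        Obj* b = &to;                               // *same* object

        assert(a != b);                             // raw acmp: wrong (unequal)
        assert(resolve(a) == resolve(b));           // resolved acmp: correct
    }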
2768 
2769   case Bytecodes::_ifeq: btest = BoolTest::eq; goto handle_ifxx;
2770   case Bytecodes::_ifne: btest = BoolTest::ne; goto handle_ifxx;
2771   case Bytecodes::_iflt: btest = BoolTest::lt; goto handle_ifxx;
2772   case Bytecodes::_ifle: btest = BoolTest::le; goto handle_ifxx;
2773   case Bytecodes::_ifgt: btest = BoolTest::gt; goto handle_ifxx;
2774   case Bytecodes::_ifge: btest = BoolTest::ge; goto handle_ifxx;
2775   handle_ifxx:
2776     // If this is a backwards branch in the bytecodes, add Safepoint
2777     maybe_add_safepoint(iter().get_dest());
2778     a = _gvn.intcon(0);
2779     b = pop();
2780     c = _gvn.transform( new CmpINode(b, a) );
2781     do_if(btest, c);
2782     break;
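
All six if<cond> bytecodes above funnel into the same shape: pop one int, compare it against a shared intcon(0) with a single CmpINode, and let only the BoolTest mask vary. A toy model of that factoring (the Mask enum mirrors BoolTest::mask; none of this is C2 code):

    #include <cstdio>

    enum class Mask { eq, ne, lt, le, gt, ge };     // mirrors BoolTest::mask

    // One comparison against zero serves all six bytecodes; only the mask
    // applied to the comparison's sign differs.
    static bool test(int b, Mask m) {
        int cmp = (b > 0) - (b < 0);                // sign of CmpI(b, 0)
        switch (m) {
            case Mask::eq: return cmp == 0;
            case Mask::ne: return cmp != 0;
            case Mask::lt: return cmp <  0;
            case Mask::le: return cmp <= 0;
            case Mask::gt: return cmp >  0;
            case Mask::ge: return cmp >= 0;
        }
        return false;                               // unreachable
    }

    int main() {
        std::printf("%d\n", test(-3, Mask::lt));    // 1: models "iflt taken"
    }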
2783 