< prev index next >

src/hotspot/share/opto/graphKit.cpp

Print this page

  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/register.hpp"
  26 #include "ci/ciObjArray.hpp"
  27 #include "ci/ciUtilities.hpp"
  28 #include "classfile/javaClasses.hpp"
  29 #include "compiler/compileLog.hpp"
  30 #include "gc/shared/barrierSet.hpp"
  31 #include "gc/shared/c2/barrierSetC2.hpp"
  32 #include "interpreter/interpreter.hpp"
  33 #include "memory/resourceArea.hpp"

  34 #include "opto/addnode.hpp"
  35 #include "opto/castnode.hpp"
  36 #include "opto/convertnode.hpp"
  37 #include "opto/graphKit.hpp"
  38 #include "opto/idealKit.hpp"
  39 #include "opto/intrinsicnode.hpp"
  40 #include "opto/locknode.hpp"
  41 #include "opto/machnode.hpp"
  42 #include "opto/opaquenode.hpp"
  43 #include "opto/parse.hpp"
  44 #include "opto/rootnode.hpp"
  45 #include "opto/runtime.hpp"
  46 #include "opto/subtypenode.hpp"
  47 #include "runtime/deoptimization.hpp"
  48 #include "runtime/sharedRuntime.hpp"
  49 #include "utilities/bitMap.inline.hpp"
  50 #include "utilities/growableArray.hpp"
  51 #include "utilities/powerOfTwo.hpp"
  52 
  53 //----------------------------GraphKit-----------------------------------------

2211   case Deoptimization::Action_make_not_entrant:
2212     C->set_trap_can_recompile(true);
2213     break;
2214   case Deoptimization::Action_none:
2215   case Deoptimization::Action_make_not_compilable:
2216     break;
2217   default:
2218 #ifdef ASSERT
2219     fatal("unknown action %d: %s", action, Deoptimization::trap_action_name(action));
2220 #endif
2221     break;
2222   }
2223 
2224   if (TraceOptoParse) {
2225     char buf[100];
2226     tty->print_cr("Uncommon trap %s at bci:%d",
2227                   Deoptimization::format_trap_request(buf, sizeof(buf),
2228                                                       trap_request), bci());
2229   }
2230 










2231   CompileLog* log = C->log();
2232   if (log != nullptr) {
2233     int kid = (klass == nullptr)? -1: log->identify(klass);
2234     log->begin_elem("uncommon_trap bci='%d'", bci());
2235     char buf[100];
2236     log->print(" %s", Deoptimization::format_trap_request(buf, sizeof(buf),
2237                                                           trap_request));
2238     if (kid >= 0)         log->print(" klass='%d'", kid);
2239     if (comment != nullptr)  log->print(" comment='%s'", comment);
2240     log->end_elem();
2241   }
2242 
2243   // Make sure any guarding test views this path as very unlikely
2244   Node *i0 = control()->in(0);
2245   if (i0 != nullptr && i0->is_If()) {        // Found a guarding if test?
2246     IfNode *iff = i0->as_If();
2247     float f = iff->_prob;   // Get prob
2248     if (control()->Opcode() == Op_IfTrue) {
2249       if (f > PROB_UNLIKELY_MAG(4))
2250         iff->_prob = PROB_MIN;

3057   if (UncommonNullCast               // Cutout for this technique
3058       && obj != null()               // And not the -Xcomp stupid case?
3059       && !too_many_traps(reason)
3060       ) {
3061     if (speculating) {
3062       return true;
3063     }
3064     if (data == nullptr)
3065       // Edge case:  no mature data.  Be optimistic here.
3066       return true;
3067     // If the profile has not seen a null, assume it won't happen.
3068     assert(java_bc() == Bytecodes::_checkcast ||
3069            java_bc() == Bytecodes::_instanceof ||
3070            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3071     return !data->as_BitData()->null_seen();
3072   }
3073   speculating = false;
3074   return false;
3075 }
3076 









































































// Emit a guard that the class represented by 'klass' is currently in the
// being_initialized state: load the klass' init_state byte (acquire
// ordering) and compare it for equality against being_initialized.
// The path where the test fails is cut out into an uncommon trap
// (Reason_initialized, Action_reinterpret).
3077 void GraphKit::guard_klass_being_initialized(Node* klass) {
3078   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3079   Node* adr = basic_plus_adr(top(), klass, init_state_off);
     // Acquire load of InstanceKlass::_init_state through immutable memory.
3080   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3081                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3082                                     T_BYTE, MemNode::acquire);
3083   init_state = _gvn.transform(init_state);
3084 
3085   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3086 
3087   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3088   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3089 
     // BuildCutout keeps the equal path inline; the other path traps.
3090   { BuildCutout unless(this, tst, PROB_MAX);
3091     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3092   }
3093 }
3094 
// Emit a guard that the current thread is the one initializing the class:
// load InstanceKlass::_init_thread and compare it for equality against
// ThreadLocalNode (the current thread). The path where the comparison
// fails is cut out into an uncommon trap (Reason_uninitialized,
// Action_none).
3095 void GraphKit::guard_init_thread(Node* klass) {
3096   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3097   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3098 
     // Plain (unordered) load of the initializing thread pointer.
3099   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3100                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3101                                      T_ADDRESS, MemNode::unordered);
3102   init_thread = _gvn.transform(init_thread);
3103 
3104   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3105 
3106   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3107   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3108 
3109   { BuildCutout unless(this, tst, PROB_MAX);
3110     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3111   }
3112 }
3113 
// Emit a class-initialization barrier for 'ik' if one is required in the
// compilation context 'context':
//  - class being initialized and a barrier is needed: guard that it is
//    still being initialized by the current thread, then fence;
//  - class fully initialized: nothing to do;
//  - otherwise (not yet initialized): uncommon trap so the interpreter
//    performs initialization (Reason_uninitialized, Action_reinterpret).
3114 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {




 
3115   if (ik->is_being_initialized()) {
3116     if (C->needs_clinit_barrier(ik, context)) {
3117       Node* klass = makecon(TypeKlassPtr::make(ik));
3118       guard_klass_being_initialized(klass);
3119       guard_init_thread(klass);
         // Fence to keep subsequent accesses after the initialization checks.
3120       insert_mem_bar(Op_MemBarCPUOrder);
3121     }
3122   } else if (ik->is_initialized()) {
3123     return; // no barrier needed
3124   } else {




3125     uncommon_trap(Deoptimization::Reason_uninitialized,
3126                   Deoptimization::Action_reinterpret,
3127                   nullptr);
3128   }
3129 }
3130 
3131 //------------------------maybe_cast_profiled_receiver-------------------------
3132 // If the profile has seen exactly one type, narrow to exactly that type.
3133 // Subsequent type checks will always fold up.
3134 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3135                                              const TypeKlassPtr* require_klass,
3136                                              ciKlass* spec_klass,
3137                                              bool safe_for_replace) {
3138   if (!UseTypeProfile || !TypeProfileCasts) return nullptr;
3139 
3140   Deoptimization::DeoptReason reason = Deoptimization::reason_class_check(spec_klass != nullptr);
3141 
3142   // Make sure we haven't already deoptimized from this tactic.
3143   if (too_many_traps_or_recompiles(reason))
3144     return nullptr;

  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "asm/register.hpp"
  26 #include "ci/ciObjArray.hpp"
  27 #include "ci/ciUtilities.hpp"
  28 #include "classfile/javaClasses.hpp"
  29 #include "compiler/compileLog.hpp"
  30 #include "gc/shared/barrierSet.hpp"
  31 #include "gc/shared/c2/barrierSetC2.hpp"
  32 #include "interpreter/interpreter.hpp"
  33 #include "memory/resourceArea.hpp"
  34 #include "oops/trainingData.hpp"
  35 #include "opto/addnode.hpp"
  36 #include "opto/castnode.hpp"
  37 #include "opto/convertnode.hpp"
  38 #include "opto/graphKit.hpp"
  39 #include "opto/idealKit.hpp"
  40 #include "opto/intrinsicnode.hpp"
  41 #include "opto/locknode.hpp"
  42 #include "opto/machnode.hpp"
  43 #include "opto/opaquenode.hpp"
  44 #include "opto/parse.hpp"
  45 #include "opto/rootnode.hpp"
  46 #include "opto/runtime.hpp"
  47 #include "opto/subtypenode.hpp"
  48 #include "runtime/deoptimization.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "utilities/bitMap.inline.hpp"
  51 #include "utilities/growableArray.hpp"
  52 #include "utilities/powerOfTwo.hpp"
  53 
  54 //----------------------------GraphKit-----------------------------------------

2212   case Deoptimization::Action_make_not_entrant:
2213     C->set_trap_can_recompile(true);
2214     break;
2215   case Deoptimization::Action_none:
2216   case Deoptimization::Action_make_not_compilable:
2217     break;
2218   default:
2219 #ifdef ASSERT
2220     fatal("unknown action %d: %s", action, Deoptimization::trap_action_name(action));
2221 #endif
2222     break;
2223   }
2224 
2225   if (TraceOptoParse) {
2226     char buf[100];
2227     tty->print_cr("Uncommon trap %s at bci:%d",
2228                   Deoptimization::format_trap_request(buf, sizeof(buf),
2229                                                       trap_request), bci());
2230   }
2231 
2232   if (PreloadReduceTraps && Compile::current()->for_preload() &&
2233       (action != Deoptimization::Action_none)) {
2234     ResourceMark rm;
2235     ciMethod* cim = Compile::current()->method();
2236     log_debug(aot, codecache, deoptimization)("Uncommon trap in preload code: reason=%s action=%s method=%s::%s bci=%d, %s",
2237                   Deoptimization::trap_reason_name(reason), Deoptimization::trap_action_name(action),
2238                   cim->holder()->name()->as_klass_external_name(), cim->name()->as_klass_external_name(),
2239                   bci(), comment);
2240   }
2241 
2242   CompileLog* log = C->log();
2243   if (log != nullptr) {
2244     int kid = (klass == nullptr)? -1: log->identify(klass);
2245     log->begin_elem("uncommon_trap bci='%d'", bci());
2246     char buf[100];
2247     log->print(" %s", Deoptimization::format_trap_request(buf, sizeof(buf),
2248                                                           trap_request));
2249     if (kid >= 0)         log->print(" klass='%d'", kid);
2250     if (comment != nullptr)  log->print(" comment='%s'", comment);
2251     log->end_elem();
2252   }
2253 
2254   // Make sure any guarding test views this path as very unlikely
2255   Node *i0 = control()->in(0);
2256   if (i0 != nullptr && i0->is_If()) {        // Found a guarding if test?
2257     IfNode *iff = i0->as_If();
2258     float f = iff->_prob;   // Get prob
2259     if (control()->Opcode() == Op_IfTrue) {
2260       if (f > PROB_UNLIKELY_MAG(4))
2261         iff->_prob = PROB_MIN;

3068   if (UncommonNullCast               // Cutout for this technique
3069       && obj != null()               // And not the -Xcomp stupid case?
3070       && !too_many_traps(reason)
3071       ) {
3072     if (speculating) {
3073       return true;
3074     }
3075     if (data == nullptr)
3076       // Edge case:  no mature data.  Be optimistic here.
3077       return true;
3078     // If the profile has not seen a null, assume it won't happen.
3079     assert(java_bc() == Bytecodes::_checkcast ||
3080            java_bc() == Bytecodes::_instanceof ||
3081            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3082     return !data->as_BitData()->null_seen();
3083   }
3084   speculating = false;
3085   return false;
3086 }
3087 
// Emit a guard that the class represented by 'klass' is fully initialized.
// Loads the klass' init_state byte and compares it for equality against
// fully_initialized; the slow (not-yet-initialized) path is handled
// according to ClassInitBarrierMode:
//   mode 1: uncommon trap (Action_none — the nmethod itself is kept);
//   mode 2: runtime call to the class_init_barrier stub, with control,
//           I/O and memory merged back through a Region/Phi.
// Marks the compilation as containing clinit barriers. Only valid when
// C->do_clinit_barriers() is on (asserted below).
3088 void GraphKit::guard_klass_is_initialized(Node* klass) {
3089   assert(C->do_clinit_barriers(), "should be called only for clinit barriers");
3090   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3091   Node* adr = basic_plus_adr(top(), klass, init_state_off);
     // NOTE: unordered load here, unlike the acquire load in
     // guard_klass_being_initialized.
3092   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3093                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3094                                     T_BYTE, MemNode::unordered);
3095   init_state = _gvn.transform(init_state);
3096 
3097   Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
3098 
3099   Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
3100   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3101 
3102   switch (ClassInitBarrierMode) {
3103     case 1: { // uncommon trap on slow path
3104       BuildCutout unless(this, tst, PROB_MAX);
3105       // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
3106       uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3107       break;
3108     }
3109     case 2: { // runtime call on slow path
3110       if (StressClassInitBarriers) {
3111         tst = makecon(TypeInt::ZERO); // always go through slow path
3112       }
3113       IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3114 //    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3115 
         // Region merging the fast path (req 1) and the slow path (req 2).
3116       RegionNode* r = new RegionNode(3);
3117       r->init_req(1, _gvn.transform(new IfTrueNode(iff)));
3118 
         // Continue building along the slow (not initialized) path.
3119       set_control(_gvn.transform(new IfFalseNode(iff)));
3120 
3121       if (!stopped()) {
3122         kill_dead_locals();
3123 
3124         Node* call = make_runtime_call(RC_NO_LEAF,
3125                                        OptoRuntime::class_init_barrier_Type(),
3126                                        OptoRuntime::class_init_barrier_Java(),
3127                                        nullptr, TypePtr::BOTTOM,
3128                                        klass);
3129         // Deoptimization during class init barrier execution should trigger current bytecode reexecution.
3130         call->jvms()->set_should_reexecute(true);
3131 
3132         // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
3133         // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
3134         // Compilation fails when distinct exception states are combined.
3135         make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);
3136 
3137         Node* fast_io  = call->in(TypeFunc::I_O);
3138         Node* fast_mem = call->in(TypeFunc::Memory);
3139         // These two phis are pre-filled with copies of the fast IO and Memory
3140         Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
3141         Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3142 
3143         r->init_req(2, control());
3144         io_phi->init_req(2, i_o());
3145         mem_phi->init_req(2, reset_memory());
3146 
3147         set_all_memory(_gvn.transform(mem_phi));
3148         set_i_o(_gvn.transform(io_phi));
3149       } else {
           // Slow path is dead; plug the region input with top.
3150         r->init_req(2, top());
3151       }
3152       set_control(_gvn.transform(r));
3153       break;
3154     }
3155 
3156     default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
3157   }
3158   C->set_has_clinit_barriers(true);
3159 }
3160 
// Emit a guard that the class represented by 'klass' is currently in the
// being_initialized state: load the klass' init_state byte (acquire
// ordering) and compare it for equality against being_initialized.
// The path where the test fails is cut out into an uncommon trap
// (Reason_initialized, Action_reinterpret).
3161 void GraphKit::guard_klass_being_initialized(Node* klass) {
3162   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3163   Node* adr = basic_plus_adr(top(), klass, init_state_off);
     // Acquire load of InstanceKlass::_init_state through immutable memory.
3164   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3165                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3166                                     T_BYTE, MemNode::acquire);
3167   init_state = _gvn.transform(init_state);
3168 
3169   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3170 
3171   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3172   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3173 
     // BuildCutout keeps the equal path inline; the other path traps.
3174   { BuildCutout unless(this, tst, PROB_MAX);
3175     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3176   }
3177 }
3178 
// Emit a guard that the current thread is the one initializing the class:
// load InstanceKlass::_init_thread and compare it for equality against
// ThreadLocalNode (the current thread). The path where the comparison
// fails is cut out into an uncommon trap (Reason_uninitialized,
// Action_none).
3179 void GraphKit::guard_init_thread(Node* klass) {
3180   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3181   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3182 
     // Plain (unordered) load of the initializing thread pointer.
3183   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3184                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3185                                      T_ADDRESS, MemNode::unordered);
3186   init_thread = _gvn.transform(init_thread);
3187 
3188   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3189 
3190   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3191   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3192 
3193   { BuildCutout unless(this, tst, PROB_MAX);
3194     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3195   }
3196 }
3197 
// Emit a class-initialization barrier for 'ik' if one is required in the
// compilation context 'context':
//  - clinit barriers enabled (e.g. AOT/preload code): unconditionally
//    guard that the class is fully initialized and return;
//  - class being initialized and a barrier is needed: guard that it is
//    still being initialized by the current thread, then fence;
//  - class fully initialized: nothing to do;
//  - otherwise (not yet initialized): uncommon trap so the interpreter
//    performs initialization (Reason_uninitialized, Action_reinterpret);
//    logged for AOT compiles since such traps are costly in preload code.
3198 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3199   if (C->do_clinit_barriers()) {
3200     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3201     guard_klass_is_initialized(klass);
3202     return;
3203   }
3204   if (ik->is_being_initialized()) {
3205     if (C->needs_clinit_barrier(ik, context)) {
3206       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3207       guard_klass_being_initialized(klass);
3208       guard_init_thread(klass);
         // Fence to keep subsequent accesses after the initialization checks.
3209       insert_mem_bar(Op_MemBarCPUOrder);
3210     }
3211   } else if (ik->is_initialized()) {
3212     return; // no barrier needed
3213   } else {
3214     if (C->env()->task()->is_aot_compile()) {
3215       ResourceMark rm;
3216       log_debug(aot, compilation)("Emitting uncommon trap (clinit barrier) in AOT code for %s", ik->name()->as_klass_external_name());
3217     }
3218     uncommon_trap(Deoptimization::Reason_uninitialized,
3219                   Deoptimization::Action_reinterpret,
3220                   nullptr);
3221   }
3222 }
3223 
3224 //------------------------maybe_cast_profiled_receiver-------------------------
3225 // If the profile has seen exactly one type, narrow to exactly that type.
3226 // Subsequent type checks will always fold up.
3227 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3228                                              const TypeKlassPtr* require_klass,
3229                                              ciKlass* spec_klass,
3230                                              bool safe_for_replace) {
3231   if (!UseTypeProfile || !TypeProfileCasts) return nullptr;
3232 
3233   Deoptimization::DeoptReason reason = Deoptimization::reason_class_check(spec_klass != nullptr);
3234 
3235   // Make sure we haven't already deoptimized from this tactic.
3236   if (too_many_traps_or_recompiles(reason))
3237     return nullptr;
< prev index next >