src/hotspot/share/opto/graphKit.cpp

(old version of the changed hunks)

2132   case Deoptimization::Action_make_not_entrant:
2133     C->set_trap_can_recompile(true);
2134     break;
2135   case Deoptimization::Action_none:
2136   case Deoptimization::Action_make_not_compilable:
2137     break;
2138   default:
2139 #ifdef ASSERT
2140     fatal("unknown action %d: %s", action, Deoptimization::trap_action_name(action));
2141 #endif
2142     break;
2143   }
2144 
2145   if (TraceOptoParse) {
2146     char buf[100];
2147     tty->print_cr("Uncommon trap %s at bci:%d",
2148                   Deoptimization::format_trap_request(buf, sizeof(buf),
2149                                                       trap_request), bci());
2150   }
2151 
2152   CompileLog* log = C->log();
2153   if (log != nullptr) {
2154     int kid = (klass == nullptr)? -1: log->identify(klass);
2155     log->begin_elem("uncommon_trap bci='%d'", bci());
2156     char buf[100];
2157     log->print(" %s", Deoptimization::format_trap_request(buf, sizeof(buf),
2158                                                           trap_request));
2159     if (kid >= 0)         log->print(" klass='%d'", kid);
2160     if (comment != nullptr)  log->print(" comment='%s'", comment);
2161     log->end_elem();
2162   }
2163 
2164   // Make sure any guarding test views this path as very unlikely
2165   Node *i0 = control()->in(0);
2166   if (i0 != nullptr && i0->is_If()) {        // Found a guarding if test?
2167     IfNode *iff = i0->as_If();
2168     float f = iff->_prob;   // Get prob
2169     if (control()->Opcode() == Op_IfTrue) {
2170       if (f > PROB_UNLIKELY_MAG(4))
2171         iff->_prob = PROB_MIN;

2973   if (UncommonNullCast               // Cutout for this technique
2974       && obj != null()               // And not the -Xcomp stupid case?
2975       && !too_many_traps(reason)
2976       ) {
2977     if (speculating) {
2978       return true;
2979     }
2980     if (data == nullptr)
2981       // Edge case:  no mature data.  Be optimistic here.
2982       return true;
2983     // If the profile has not seen a null, assume it won't happen.
2984     assert(java_bc() == Bytecodes::_checkcast ||
2985            java_bc() == Bytecodes::_instanceof ||
2986            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2987     return !data->as_BitData()->null_seen();
2988   }
2989   speculating = false;
2990   return false;
2991 }
2992 
2993 void GraphKit::guard_klass_being_initialized(Node* klass) {
2994   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
2995   Node* adr = basic_plus_adr(top(), klass, init_state_off);
2996   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
2997                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
2998                                     T_BYTE, MemNode::acquire);
2999   init_state = _gvn.transform(init_state);
3000 
3001   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3002 
3003   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3004   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3005 
3006   { BuildCutout unless(this, tst, PROB_MAX);
3007     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3008   }
3009 }
3010 
3011 void GraphKit::guard_init_thread(Node* klass) {
3012   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3013   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3014 
3015   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3016                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3017                                      T_ADDRESS, MemNode::unordered);
3018   init_thread = _gvn.transform(init_thread);
3019 
3020   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3021 
3022   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3023   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3024 
3025   { BuildCutout unless(this, tst, PROB_MAX);
3026     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3027   }
3028 }
3029 
3030 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3031   if (ik->is_being_initialized()) {
3032     if (C->needs_clinit_barrier(ik, context)) {
3033       Node* klass = makecon(TypeKlassPtr::make(ik));
3034       guard_klass_being_initialized(klass);
3035       guard_init_thread(klass);
3036       insert_mem_bar(Op_MemBarCPUOrder);
3037     }
3038   } else if (ik->is_initialized()) {
3039     return; // no barrier needed
3040   } else {
3041     uncommon_trap(Deoptimization::Reason_uninitialized,
3042                   Deoptimization::Action_reinterpret,
3043                   nullptr);
3044   }
3045 }
3046 
3047 //------------------------maybe_cast_profiled_receiver-------------------------
3048 // If the profile has seen exactly one type, narrow to exactly that type.
3049 // Subsequent type checks will always fold up.
3050 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3051                                              const TypeKlassPtr* require_klass,
3052                                              ciKlass* spec_klass,
3053                                              bool safe_for_replace) {

(new version of the same hunks)

2132   case Deoptimization::Action_make_not_entrant:
2133     C->set_trap_can_recompile(true);
2134     break;
2135   case Deoptimization::Action_none:
2136   case Deoptimization::Action_make_not_compilable:
2137     break;
2138   default:
2139 #ifdef ASSERT
2140     fatal("unknown action %d: %s", action, Deoptimization::trap_action_name(action));
2141 #endif
2142     break;
2143   }
2144 
2145   if (TraceOptoParse) {
2146     char buf[100];
2147     tty->print_cr("Uncommon trap %s at bci:%d",
2148                   Deoptimization::format_trap_request(buf, sizeof(buf),
2149                                                       trap_request), bci());
2150   }
2151 
2152   if (PreloadReduceTraps && Compile::current()->for_preload() &&
2153       (action != Deoptimization::Action_none)) {
2154     ResourceMark rm;
2155     ciMethod* cim = Compile::current()->method();
2156     log_debug(aot, codecache, deoptimization)("Uncommon trap in preload code: reason=%s action=%s method=%s::%s bci=%d, %s",
2157                   Deoptimization::trap_reason_name(reason), Deoptimization::trap_action_name(action),
2158                   cim->holder()->name()->as_klass_external_name(), cim->name()->as_klass_external_name(),
2159                   bci(), comment);
2160   }
2161 
2162   CompileLog* log = C->log();
2163   if (log != nullptr) {
2164     int kid = (klass == nullptr)? -1: log->identify(klass);
2165     log->begin_elem("uncommon_trap bci='%d'", bci());
2166     char buf[100];
2167     log->print(" %s", Deoptimization::format_trap_request(buf, sizeof(buf),
2168                                                           trap_request));
2169     if (kid >= 0)         log->print(" klass='%d'", kid);
2170     if (comment != nullptr)  log->print(" comment='%s'", comment);
2171     log->end_elem();
2172   }
2173 
2174   // Make sure any guarding test views this path as very unlikely
2175   Node *i0 = control()->in(0);
2176   if (i0 != nullptr && i0->is_If()) {        // Found a guarding if test?
2177     IfNode *iff = i0->as_If();
2178     float f = iff->_prob;   // Get prob
2179     if (control()->Opcode() == Op_IfTrue) {
2180       if (f > PROB_UNLIKELY_MAG(4))
2181         iff->_prob = PROB_MIN;
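
The only functional change in this hunk is the new if (PreloadReduceTraps && ...) block above: when the current compilation is preload (AOT) code and the requested trap carries a real action (anything but Action_none), the request is reported through Unified Logging on the aot, codecache and deoptimization tags at debug level, so it should be visible with a selector along the lines of -Xlog:aot+codecache+deoptimization=debug. A minimal standalone sketch of the guard, with the flag and compile queries reduced to plain booleans for illustration (a model, not HotSpot code):

  #include <cstdio>

  // Model of the new logging guard: fires only for preload/AOT compilations and
  // only for trap requests whose action is something other than Action_none.
  void maybe_log_preload_trap(bool preload_reduce_traps,   // PreloadReduceTraps flag
                              bool compiling_for_preload,  // Compile::current()->for_preload()
                              bool action_is_none,
                              const char* reason, const char* action) {
    if (preload_reduce_traps && compiling_for_preload && !action_is_none) {
      std::printf("Uncommon trap in preload code: reason=%s action=%s\n", reason, action);
    }
  }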

2983   if (UncommonNullCast               // Cutout for this technique
2984       && obj != null()               // And not the -Xcomp stupid case?
2985       && !too_many_traps(reason)
2986       ) {
2987     if (speculating) {
2988       return true;
2989     }
2990     if (data == nullptr)
2991       // Edge case:  no mature data.  Be optimistic here.
2992       return true;
2993     // If the profile has not seen a null, assume it won't happen.
2994     assert(java_bc() == Bytecodes::_checkcast ||
2995            java_bc() == Bytecodes::_instanceof ||
2996            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2997     return !data->as_BitData()->null_seen();
2998   }
2999   speculating = false;
3000   return false;
3001 }
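
This early-return ladder (unchanged by the patch, shown for context) decides whether a null path can be replaced by an uncommon trap: the technique must be enabled (UncommonNullCast), the object must not be a constant null, and the trap budget must not be exhausted; after that the answer is optimistic while speculating or when no mature profile data exists, and otherwise follows the MDO's null_seen bit. A standalone restatement of the decision, with the GraphKit queries reduced to booleans (illustrative only):

  // Should the compiler assume this reference is never null at this bytecode?
  bool assume_never_null(bool uncommon_null_cast,     // UncommonNullCast flag
                         bool obj_is_constant_null,   // obj == null()
                         bool too_many_traps,
                         bool& speculating,
                         bool has_profile_data,       // data != nullptr
                         bool profile_saw_null) {     // BitData::null_seen()
    if (uncommon_null_cast && !obj_is_constant_null && !too_many_traps) {
      if (speculating)       return true;   // speculative answers may stay optimistic
      if (!has_profile_data) return true;   // no mature data: be optimistic
      return !profile_saw_null;             // profile never saw a null: assume it stays that way
    }
    speculating = false;
    return false;
  }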
3002 
3003 void GraphKit::guard_klass_is_initialized(Node* klass) {
3004   assert(C->do_clinit_barriers(), "should be called only for clinit barriers");
3005   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3006   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3007   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3008                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3009                                     T_BYTE, MemNode::unordered);
3010   init_state = _gvn.transform(init_state);
3011 
3012   Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
3013 
3014   Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
3015   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3016 
3017   switch (ClassInitBarrierMode) {
3018     case 1: { // uncommon trap on slow path
3019       BuildCutout unless(this, tst, PROB_MAX);
3020       // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
3021       uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3022       break;
3023     }
3024     case 2: { // runtime call on slow path
3025       if (StressClassInitBarriers) {
3026         tst = makecon(TypeInt::ZERO); // always go through slow path
3027       }
3028       IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3029 //    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3030 
3031       RegionNode* r = new RegionNode(3);
3032       r->init_req(1, _gvn.transform(new IfTrueNode(iff)));
3033 
3034       set_control(_gvn.transform(new IfFalseNode(iff)));
3035 
3036       if (!stopped()) {
3037         kill_dead_locals();
3038 
3039         Node* call = make_runtime_call(RC_NO_LEAF,
3040                                        OptoRuntime::class_init_barrier_Type(),
3041                                        OptoRuntime::class_init_barrier_Java(),
3042                                        nullptr, TypePtr::BOTTOM,
3043                                        klass);
3044         // Deoptimization during class init barrier execution should trigger current bytecode reexecution.
3045         call->jvms()->set_should_reexecute(true);
3046 
3047         // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
3048         // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
3049         // Compilation fails when distinct exception states are combined.
3050         make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);
3051 
3052         Node* fast_io  = call->in(TypeFunc::I_O);
3053         Node* fast_mem = call->in(TypeFunc::Memory);
3054         // These two phis are pre-filled with copies of the fast IO and Memory
3055         Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
3056         Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3057 
3058         r->init_req(2, control());
3059         io_phi->init_req(2, i_o());
3060         mem_phi->init_req(2, reset_memory());
3061 
3062         set_all_memory(_gvn.transform(mem_phi));
3063         set_i_o(_gvn.transform(io_phi));
3064       } else {
3065         r->init_req(2, top());
3066       }
3067       set_control(_gvn.transform(r));
3068       break;
3069     }
3070 
3071     default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
3072   }
3073   C->set_has_clinit_barriers(true);
3074 }
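
guard_klass_is_initialized is new in this change. It loads the klass's init_state byte with a plain (unordered) load, compares it against InstanceKlass::fully_initialized, and then picks one of two slow paths according to ClassInitBarrierMode: mode 1 leaves compiled code through an uncommon trap that does not invalidate the nmethod (Action_none), while mode 2 emits an out-of-line call to the class_init_barrier runtime entry and rejoins the fast path through a Region with I/O and memory Phis. A standalone sketch of the runtime behaviour the emitted code is meant to have (a model under stated assumptions, not HotSpot code; the ClassState names mirror the constants used above):

  #include <cstdio>

  enum ClassState { allocated, loaded, linked, being_initialized,
                    fully_initialized, initialization_error };

  void deopt_and_reexecute_in_interpreter() { std::puts("mode 1: uncommon trap, Action_none"); }
  void call_class_init_barrier_runtime()    { std::puts("mode 2: runtime call, then fall through"); }

  // What one barrier site does when executed.
  void class_init_barrier_site(ClassState init_state, int class_init_barrier_mode) {
    if (init_state == fully_initialized) {
      return;                                 // fast path: no side effects
    }
    if (class_init_barrier_mode == 1) {
      deopt_and_reexecute_in_interpreter();   // the interpreter triggers initialization
    } else if (class_init_barrier_mode == 2) {
      call_class_init_barrier_runtime();      // assumed to initialize the class or wait for it
    }                                         // other values are rejected with fatal() at compile time
  }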
3075 
3076 void GraphKit::guard_klass_being_initialized(Node* klass) {
3077   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3078   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3079   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3080                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3081                                     T_BYTE, MemNode::acquire);
3082   init_state = _gvn.transform(init_state);
3083 
3084   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3085 
3086   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3087   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3088 
3089   { BuildCutout unless(this, tst, PROB_MAX);
3090     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3091   }
3092 }
3093 
3094 void GraphKit::guard_init_thread(Node* klass) {
3095   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3096   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3097 
3098   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3099                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3100                                      T_ADDRESS, MemNode::unordered);
3101   init_thread = _gvn.transform(init_thread);
3102 
3103   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3104 
3105   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3106   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3107 
3108   { BuildCutout unless(this, tst, PROB_MAX);
3109     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3110   }
3111 }
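
guard_klass_being_initialized and guard_init_thread are untouched by the patch but are the pre-existing counterpart to the new guard above: for a class whose initialization was already in progress at compile time, compiled code may only be entered while init_state is still being_initialized and the initializing thread is the current thread; either guard failing exits through an uncommon trap (Reason_initialized and Reason_uninitialized respectively). A compact model of the combined runtime condition, with the two loaded values passed in as plain parameters (illustrative only):

  // May execution stay in code compiled under the "class is being initialized by us" assumption?
  bool may_stay_in_compiled_code(bool klass_still_being_initialized, // init_state == being_initialized
                                 const void* init_thread,            // load from init_thread_offset()
                                 const void* current_thread) {       // ThreadLocalNode at runtime
    return klass_still_being_initialized && init_thread == current_thread;
  }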
3112 
3113 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3114   if (C->do_clinit_barriers()) {
3115     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3116     guard_klass_is_initialized(klass);
3117     return;
3118   }
3119   if (ik->is_being_initialized()) {
3120     if (C->needs_clinit_barrier(ik, context)) {
3121       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3122       guard_klass_being_initialized(klass);
3123       guard_init_thread(klass);
3124       insert_mem_bar(Op_MemBarCPUOrder);
3125     }
3126   } else if (ik->is_initialized()) {
3127     return; // no barrier needed
3128   } else {
3129     uncommon_trap(Deoptimization::Reason_uninitialized,
3130                   Deoptimization::Action_reinterpret,
3131                   nullptr);
3132   }
3133 }
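
The updated clinit_barrier now distinguishes three compile-time situations instead of two. When the compilation asks for unconditional class-init barriers (C->do_clinit_barriers(), the preload/AOT case), it always emits the fully_initialized guard added above and returns; otherwise the previous behaviour is kept: a class seen as being_initialized gets the guard-plus-init-thread check followed by a CPU-order membar, an initialized class needs nothing, and anything else becomes an uncommon trap. The klass constants are also now created with Type::trust_interfaces. A sketch of that dispatch with the predicates reduced to booleans (illustrative, not HotSpot code):

  enum BarrierChoice { GuardIsInitialized, GuardBeingInitializedByThisThread,
                       NoBarrier, UncommonTrapUninitialized };

  BarrierChoice choose_clinit_barrier(bool do_clinit_barriers,       // C->do_clinit_barriers()
                                      bool klass_being_initialized,  // ik->is_being_initialized()
                                      bool klass_initialized,        // ik->is_initialized()
                                      bool needs_barrier) {          // C->needs_clinit_barrier(ik, context)
    if (do_clinit_barriers)      return GuardIsInitialized;          // new path in this change
    if (klass_being_initialized) return needs_barrier ? GuardBeingInitializedByThisThread : NoBarrier;
    if (klass_initialized)       return NoBarrier;
    return UncommonTrapUninitialized;        // Reason_uninitialized, Action_reinterpret
  }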
3134 
3135 //------------------------maybe_cast_profiled_receiver-------------------------
3136 // If the profile has seen exactly one type, narrow to exactly that type.
3137 // Subsequent type checks will always fold up.
3138 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3139                                              const TypeKlassPtr* require_klass,
3140                                              ciKlass* spec_klass,
3141                                              bool safe_for_replace) {