src/hotspot/share/opto/graphKit.cpp

  if (UncommonNullCast               // Cutout for this technique
      && obj != null()               // And not the -Xcomp stupid case?
      && !too_many_traps(reason)
      ) {
    if (speculating) {
      return true;
    }
    if (data == nullptr)
      // Edge case:  no mature data.  Be optimistic here.
      return true;
    // If the profile has not seen a null, assume it won't happen.
    assert(java_bc() == Bytecodes::_checkcast ||
           java_bc() == Bytecodes::_instanceof ||
           java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
    return !data->as_BitData()->null_seen();
  }
  speculating = false;
  return false;
}

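// Emit a guard that the given klass is fully initialized. The slow path is
// taken when init_state != fully_initialized: depending on
// ClassInitBarrierMode it either re-enters the interpreter through an
// uncommon trap (mode 1) or makes a runtime call to class_init_barrier
// (mode 2).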
void GraphKit::guard_klass_is_initialized(Node* klass) {
  assert(ClassInitBarrierMode > 0, "no barriers");
  int init_state_off = in_bytes(InstanceKlass::init_state_offset());
  Node* adr = basic_plus_adr(top(), klass, init_state_off);
  Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
                                    adr->bottom_type()->is_ptr(), TypeInt::BYTE,
                                    T_BYTE, MemNode::unordered);
  init_state = _gvn.transform(init_state);

  Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));

  Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));

  switch (ClassInitBarrierMode) {
    case 1: { // uncommon trap on slow path
      BuildCutout unless(this, tst, PROB_MAX);
      // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
      uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
      break;
    }
    case 2: { // runtime call on slow path
      if (StressClassInitBarriers) {
        tst = makecon(TypeInt::ZERO); // always go through slow path
      }
      IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);

      // Merge point for the fast path (klass already initialized) and the
      // slow path (runtime call).
      RegionNode* r = new RegionNode(3);
      r->init_req(1, _gvn.transform(new IfTrueNode(iff)));

      set_control(_gvn.transform(new IfFalseNode(iff)));

      if (!stopped()) {
        kill_dead_locals();

        Node* call = make_runtime_call(RC_NO_LEAF,
                                       OptoRuntime::class_init_barrier_Type(),
                                       OptoRuntime::class_init_barrier_Java(),
                                       nullptr, TypePtr::BOTTOM,
                                       klass);
        // Deoptimization during class init barrier execution should trigger re-execution of the current bytecode.
        call->jvms()->set_should_reexecute(true);

        // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
        // Parse::create_entry_map() introduces a barrier which uses a distinct JVM state (*before* the call).
        // Compilation fails when distinct exception states are combined.
        make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);

        Node* fast_io  = call->in(TypeFunc::I_O);
        Node* fast_mem = call->in(TypeFunc::Memory);
        // These two phis are pre-filled with copies of the fast I/O and memory states.
        Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
        Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);

        r->init_req(2, control());
        io_phi->init_req(2, i_o());
        mem_phi->init_req(2, reset_memory());

        set_all_memory(_gvn.transform(mem_phi));
        set_i_o(_gvn.transform(io_phi));
      } else {
        r->init_req(2, top());
      }
      set_control(_gvn.transform(r));
      break;
    }

    default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
  }
  C->set_has_clinit_barriers(true);
}

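// Emit a guard that the klass is still being initialized. init_state is
// loaded with acquire semantics; once the klass leaves the being_initialized
// state, the guard fails and the nmethod is deoptimized (Reason_initialized,
// Action_reinterpret).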
void GraphKit::guard_klass_being_initialized(Node* klass) {
  int init_state_off = in_bytes(InstanceKlass::init_state_offset());
  Node* adr = basic_plus_adr(top(), klass, init_state_off);
  Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
                                    adr->bottom_type()->is_ptr(), TypeInt::BYTE,
                                    T_BYTE, MemNode::acquire);
  init_state = _gvn.transform(init_state);

  Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));

  Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));

  { BuildCutout unless(this, tst, PROB_MAX);
    uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
  }
}

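// Emit a guard that the current thread is the one initializing the klass,
// comparing ThreadLocal against InstanceKlass::_init_thread. Any other
// thread traps to the interpreter (Reason_uninitialized, Action_none)
// without invalidating the nmethod.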
void GraphKit::guard_init_thread(Node* klass) {
  int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
  Node* adr = basic_plus_adr(top(), klass, init_thread_off);

  Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
                                     adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
                                     T_ADDRESS, MemNode::unordered);
  init_thread = _gvn.transform(init_thread);

  Node* cur_thread = _gvn.transform(new ThreadLocalNode());

  Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));

  { BuildCutout unless(this, tst, PROB_MAX);
    uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
  }
}

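// Emit a class initialization barrier for ik, compiled in context. With
// do_clinit_barriers() enabled, the full is-initialized guard is emitted
// unconditionally; otherwise guards are only required while ik is being
// initialized, and a not-yet-initialized ik traps with Reason_uninitialized.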
void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
  if (C->do_clinit_barriers()) {
    Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
    guard_klass_is_initialized(klass);
    return;
  }
  if (ik->is_being_initialized()) {
    if (C->needs_clinit_barrier(ik, context)) {
      Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
      guard_klass_being_initialized(klass);
      guard_init_thread(klass);
      insert_mem_bar(Op_MemBarCPUOrder);
    }
  } else if (ik->is_initialized()) {
    return; // no barrier needed
  } else {
    uncommon_trap(Deoptimization::Reason_uninitialized,
                  Deoptimization::Action_reinterpret,
                  nullptr);
  }
}

//------------------------maybe_cast_profiled_receiver-------------------------
// If the profile has seen exactly one type, narrow to exactly that type.
// Subsequent type checks will always fold up.
Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
                                             const TypeKlassPtr* require_klass,
                                             ciKlass* spec_klass,
                                             bool safe_for_replace) {