
src/hotspot/share/opto/graphKit.cpp

Old version:

2986   if (UncommonNullCast               // Cutout for this technique
2987       && obj != null()               // And not the -Xcomp stupid case?
2988       && !too_many_traps(reason)
2989       ) {
2990     if (speculating) {
2991       return true;
2992     }
2993     if (data == nullptr)
2994       // Edge case:  no mature data.  Be optimistic here.
2995       return true;
2996     // If the profile has not seen a null, assume it won't happen.
2997     assert(java_bc() == Bytecodes::_checkcast ||
2998            java_bc() == Bytecodes::_instanceof ||
2999            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3000     return !data->as_BitData()->null_seen();
3001   }
3002   speculating = false;
3003   return false;
3004 }
3005 
3006 void GraphKit::guard_klass_being_initialized(Node* klass) {
3007   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3008   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3009   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3010                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3011                                     T_BYTE, MemNode::unordered);
3012   init_state = _gvn.transform(init_state);
3013 
3014   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3015 
3016   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3017   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3018 
3019   { BuildCutout unless(this, tst, PROB_MAX);
3020     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3021   }
3022 }
3023 
3024 void GraphKit::guard_init_thread(Node* klass) {
3025   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3026   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3027 
3028   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3029                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3030                                      T_ADDRESS, MemNode::unordered);
3031   init_thread = _gvn.transform(init_thread);
3032 
3033   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3034 
3035   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3036   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3037 
3038   { BuildCutout unless(this, tst, PROB_MAX);
3039     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3040   }
3041 }
3042 
3043 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3044   if (ik->is_being_initialized()) {
3045     if (C->needs_clinit_barrier(ik, context)) {
3046       Node* klass = makecon(TypeKlassPtr::make(ik));
3047       guard_klass_being_initialized(klass);
3048       guard_init_thread(klass);
3049       insert_mem_bar(Op_MemBarCPUOrder);
3050     }
3051   } else if (ik->is_initialized()) {
3052     return; // no barrier needed
3053   } else {
3054     uncommon_trap(Deoptimization::Reason_uninitialized,
3055                   Deoptimization::Action_reinterpret,
3056                   nullptr);
3057   }
3058 }
3059 
3060 //------------------------maybe_cast_profiled_receiver-------------------------
3061 // If the profile has seen exactly one type, narrow to exactly that type.
3062 // Subsequent type checks will always fold up.
3063 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3064                                              const TypeKlassPtr* require_klass,
3065                                              ciKlass* spec_klass,
3066                                              bool safe_for_replace) {

New version:

2986   if (UncommonNullCast               // Cutout for this technique
2987       && obj != null()               // And not the -Xcomp stupid case?
2988       && !too_many_traps(reason)
2989       ) {
2990     if (speculating) {
2991       return true;
2992     }
2993     if (data == nullptr)
2994       // Edge case:  no mature data.  Be optimistic here.
2995       return true;
2996     // If the profile has not seen a null, assume it won't happen.
2997     assert(java_bc() == Bytecodes::_checkcast ||
2998            java_bc() == Bytecodes::_instanceof ||
2999            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
3000     return !data->as_BitData()->null_seen();
3001   }
3002   speculating = false;
3003   return false;
3004 }
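
The block above is the tail of the profile query used for implicit null-check elision: speculate that the object is never null unless the bytecode's method-data (MDO) has already recorded a null via the null_seen bit. As a stand-alone illustration, here is a minimal C++ model of that decision; ProfileData and the boolean parameters are hypothetical stand-ins for HotSpot's ciProfileData/BitData and the surrounding compiler state, not real HotSpot API.

#include <cstdio>

struct ProfileData {            // stand-in for ciProfileData/BitData
  bool null_seen;               // has this bytecode ever seen a null?
};

// Mirrors the tail above: be optimistic unless profiling already saw a null.
bool seems_never_null(bool uncommon_null_cast, bool obj_is_literal_null,
                      bool too_many_traps, const ProfileData* data,
                      bool& speculating) {
  if (uncommon_null_cast && !obj_is_literal_null && !too_many_traps) {
    if (speculating) return true;      // speculative type already says non-null
    if (data == nullptr) return true;  // no mature profile: assume no null
    return !data->null_seen;           // trust the profile's null_seen bit
  }
  speculating = false;                 // technique not applicable here
  return false;
}

int main() {
  ProfileData clean{false}, dirty{true};
  bool spec = false;
  printf("%d %d\n", seems_never_null(true, false, false, &clean, spec),
                    seems_never_null(true, false, false, &dirty, spec));  // 1 0
}
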
3005 
3006 void GraphKit::guard_klass_is_initialized(Node* klass) {
3007   assert(ClassInitBarrierMode > 0, "no barriers");
3008   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3009   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3010   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3011                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3012                                     T_BYTE, MemNode::unordered);
3013   init_state = _gvn.transform(init_state);
3014 
3015   Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
3016 
3017   Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
3018   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3019 
3020   switch (ClassInitBarrierMode) {
3021     case 1: { // uncommon trap on slow path
3022       BuildCutout unless(this, tst, PROB_MAX);
3023       // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
3024       uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3025       break;
3026     }
3027     case 2: { // runtime call on slow path
3028       if (StressClassInitBarriers) {
3029         tst = makecon(TypeInt::ZERO); // always go through slow path
3030       }
3031       IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3032 //    IfNode* iff = create_and_map_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);
3033 
3034       RegionNode* r = new RegionNode(3);
3035       r->init_req(1, _gvn.transform(new IfTrueNode(iff)));
3036 
3037       set_control(_gvn.transform(new IfFalseNode(iff)));
3038 
3039       if (!stopped()) {
3040         kill_dead_locals();
3041 
3042         Node* call = make_runtime_call(RC_NO_LEAF,
3043                                        OptoRuntime::class_init_barrier_Type(),
3044                                        OptoRuntime::class_init_barrier_Java(),
3045                                        nullptr, TypePtr::BOTTOM,
3046                                        klass);
3047         // Deoptimization during class init barrier execution should trigger current bytecode reexecution.
3048         call->jvms()->set_should_reexecute(true);
3049 
3050         // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
3051         // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
3052         // Compilation fails when distinct exception states are combined.
3053         make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);
3054 
3055         Node* fast_io  = call->in(TypeFunc::I_O);
3056         Node* fast_mem = call->in(TypeFunc::Memory);
3057         // These two phis are pre-filled with copies of the fast IO and Memory
3058         Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
3059         Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
3060 
3061         r->init_req(2, control());
3062         io_phi->init_req(2, i_o());
3063         mem_phi->init_req(2, reset_memory());
3064 
3065         set_all_memory(_gvn.transform(mem_phi));
3066         set_i_o(_gvn.transform(io_phi));
3067       } else {
3068         r->init_req(2, top());
3069       }
3070       set_control(_gvn.transform(r));
3071       break;
3072     }
3073 
3074     default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
3075   }
3076   C->set_has_clinit_barriers(true);
3077 }
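
guard_klass_is_initialized emits a fast-path test that the class is fully_initialized and chooses the slow path by ClassInitBarrierMode: mode 1 uncommon-traps into the interpreter without invalidating the nmethod (Action_none), while mode 2 issues a non-leaf runtime call and merges control, I/O, and memory of the two paths through the RegionNode and the two PhiNodes. The following stand-alone sketch models only the run-time behavior of the mode-2 shape; InitState, Klass, and call_runtime_init are assumed illustrative names, not HotSpot's.

#include <cstdio>

enum InitState { allocated, being_initialized, fully_initialized };

struct Klass { InitState init_state; };

// Models the slow-path runtime call (OptoRuntime::class_init_barrier in the
// real code): it drives the class to the fully initialized state.
void call_runtime_init(Klass& k) {
  k.init_state = fully_initialized;
}

// Mode-2 shape: the initialized check is the fast path (PROB_MAX); the
// runtime call is the slow path; afterwards both paths merge.
void class_init_barrier(Klass& k) {
  if (k.init_state == fully_initialized) return;  // fast path
  call_runtime_init(k);                           // slow path
}

int main() {
  Klass k{being_initialized};
  class_init_barrier(k);
  printf("%d\n", k.init_state == fully_initialized);  // prints 1
}
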
3078 
3079 void GraphKit::guard_klass_being_initialized(Node* klass) {
3080   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3081   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3082   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3083                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3084                                     T_BYTE, MemNode::unordered);
3085   init_state = _gvn.transform(init_state);
3086 
3087   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3088 
3089   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3090   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3091 
3092   { BuildCutout unless(this, tst, PROB_MAX);
3093     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3094   }
3095 }
3096 
3097 void GraphKit::guard_init_thread(Node* klass) {
3098   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3099   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3100 
3101   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3102                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3103                                      T_ADDRESS, MemNode::unordered);
3104   init_thread = _gvn.transform(init_thread);
3105 
3106   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3107 
3108   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3109   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3110 
3111   { BuildCutout unless(this, tst, PROB_MAX);
3112     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3113   }
3114 }
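
Taken together, guard_klass_being_initialized and guard_init_thread only let compiled code continue while the class is still marked being_initialized and the current thread is the one running <clinit>. That matches the JVMS initialization procedure (section 5.5), where a recursive initialization request from the initializing thread completes normally, so this is the one case in which code of a not-yet-initialized class may run. A small stand-alone model of the combined guards; the type names are assumptions, not HotSpot code:

#include <cstdio>

enum InitState { being_initialized, fully_initialized };

struct Thread {};
struct Klass {
  InitState init_state;
  Thread*   init_thread;   // the thread currently running <clinit>, if any
};

// true  -> both guards pass, compiled code keeps running
// false -> one of the uncommon traps above would be taken
bool guards_pass(const Klass& k, Thread* cur_thread) {
  if (k.init_state != being_initialized) return false; // Reason_initialized trap
  if (k.init_thread != cur_thread)       return false; // Reason_uninitialized trap
  return true;
}

int main() {
  Thread t1, t2;
  Klass k{being_initialized, &t1};
  printf("%d %d\n", guards_pass(k, &t1), guards_pass(k, &t2));  // 1 0
}
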
3115 
3116 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3117   if (C->do_clinit_barriers()) {
3118     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3119     guard_klass_is_initialized(klass);
3120     return;
3121   }
3122   if (ik->is_being_initialized()) {
3123     if (C->needs_clinit_barrier(ik, context)) {
3124       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3125       guard_klass_being_initialized(klass);
3126       guard_init_thread(klass);
3127       insert_mem_bar(Op_MemBarCPUOrder);
3128     }
3129   } else if (ik->is_initialized()) {
3130     return; // no barrier needed
3131   } else {
3132     uncommon_trap(Deoptimization::Reason_uninitialized,
3133                   Deoptimization::Action_reinterpret,
3134                   nullptr);
3135   }
3136 }
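
clinit_barrier chooses between three outcomes from compile-time knowledge: if C->do_clinit_barriers() is set, the full is-initialized guard is always emitted, presumably because the class's state at compile time cannot be trusted for such compilations; a class seen as being initialized gets the being-initialized and init-thread guards followed by a CPU-order membar; an already initialized class needs no barrier; and anything else uncommon-traps as uninitialized. A compact stand-alone restatement of that decision tree, with illustrative names only:

#include <cstdio>

enum Barrier { none, is_initialized_guard, being_initialized_guards, trap_uninitialized };

// Mirrors the branch structure of clinit_barrier above.
Barrier pick_barrier(bool do_clinit_barriers, bool is_being_initialized,
                     bool needs_clinit_barrier, bool is_initialized) {
  if (do_clinit_barriers)   return is_initialized_guard;
  if (is_being_initialized) return needs_clinit_barrier ? being_initialized_guards : none;
  if (is_initialized)       return none;
  return trap_uninitialized;
}

int main() {
  printf("%d %d\n",
         pick_barrier(true,  false, false, true)  == is_initialized_guard,
         pick_barrier(false, true,  true,  false) == being_initialized_guards);  // 1 1
}
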
3137 
3138 //------------------------maybe_cast_profiled_receiver-------------------------
3139 // If the profile has seen exactly one type, narrow to exactly that type.
3140 // Subsequent type checks will always fold up.
3141 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3142                                              const TypeKlassPtr* require_klass,
3143                                              ciKlass* spec_klass,
3144                                              bool safe_for_replace) {