< prev index next >

src/hotspot/share/opto/graphKit.cpp

Print this page

2976   if (UncommonNullCast               // Cutout for this technique
2977       && obj != null()               // And not the -Xcomp stupid case?
2978       && !too_many_traps(reason)
2979       ) {
2980     if (speculating) {
2981       return true;
2982     }
2983     if (data == nullptr)
2984       // Edge case:  no mature data.  Be optimistic here.
2985       return true;
2986     // If the profile has not seen a null, assume it won't happen.
2987     assert(java_bc() == Bytecodes::_checkcast ||
2988            java_bc() == Bytecodes::_instanceof ||
2989            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2990     return !data->as_BitData()->null_seen();
2991   }
2992   speculating = false;
2993   return false;
2994 }
2995 







































































2996 void GraphKit::guard_klass_being_initialized(Node* klass) {
2997   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
2998   Node* adr = basic_plus_adr(top(), klass, init_state_off);
2999   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3000                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3001                                     T_BYTE, MemNode::unordered);
3002   init_state = _gvn.transform(init_state);
3003 
3004   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3005 
3006   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3007   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3008 
3009   { BuildCutout unless(this, tst, PROB_MAX);
3010     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3011   }
3012 }
3013 
3014 void GraphKit::guard_init_thread(Node* klass) {
3015   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3016   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3017 
3018   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3019                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3020                                      T_ADDRESS, MemNode::unordered);
3021   init_thread = _gvn.transform(init_thread);
3022 
3023   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3024 
3025   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3026   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3027 
3028   { BuildCutout unless(this, tst, PROB_MAX);
3029     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3030   }
3031 }
3032 
3033 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {





3034   if (ik->is_being_initialized()) {
3035     if (C->needs_clinit_barrier(ik, context)) {
3036       Node* klass = makecon(TypeKlassPtr::make(ik));
3037       guard_klass_being_initialized(klass);
3038       guard_init_thread(klass);
3039       insert_mem_bar(Op_MemBarCPUOrder);
3040     }
3041   } else if (ik->is_initialized()) {
3042     return; // no barrier needed
3043   } else {
3044     uncommon_trap(Deoptimization::Reason_uninitialized,
3045                   Deoptimization::Action_reinterpret,
3046                   nullptr);
3047   }
3048 }
3049 
3050 //------------------------maybe_cast_profiled_receiver-------------------------
3051 // If the profile has seen exactly one type, narrow to exactly that type.
3052 // Subsequent type checks will always fold up.
3053 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3054                                              const TypeKlassPtr* require_klass,
3055                                              ciKlass* spec_klass,
3056                                              bool safe_for_replace) {

2976   if (UncommonNullCast               // Cutout for this technique
2977       && obj != null()               // And not the -Xcomp stupid case?
2978       && !too_many_traps(reason)
2979       ) {
2980     if (speculating) {
2981       return true;
2982     }
2983     if (data == nullptr)
2984       // Edge case:  no mature data.  Be optimistic here.
2985       return true;
2986     // If the profile has not seen a null, assume it won't happen.
2987     assert(java_bc() == Bytecodes::_checkcast ||
2988            java_bc() == Bytecodes::_instanceof ||
2989            java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2990     return !data->as_BitData()->null_seen();
2991   }
2992   speculating = false;
2993   return false;
2994 }
2995 
// Emit a class-initialization barrier: continue on the fast path only when
// 'klass' has reached the fully_initialized state. The slow-path strategy is
// selected by ClassInitBarrierMode:
//   1 = uncommon trap (re-enter the interpreter, which runs initialization);
//   2 = runtime call to the class_init_barrier stub.
// Marks the compilation as containing clinit barriers.
void GraphKit::guard_klass_is_initialized(Node* klass) {
  assert(ClassInitBarrierMode > 0, "no barriers");
  int init_state_off = in_bytes(InstanceKlass::init_state_offset());
  Node* adr = basic_plus_adr(top(), klass, init_state_off);
  // Raw unordered byte load of the klass init-state field.
  Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
                                    adr->bottom_type()->is_ptr(), TypeInt::BYTE,
                                    T_BYTE, MemNode::unordered);
  init_state = _gvn.transform(init_state);

  Node* initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));

  // tst is true on the expected fast path (state == fully_initialized).
  Node* chk = _gvn.transform(new CmpINode(initialized_state, init_state));
  Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));

  switch (ClassInitBarrierMode) {
    case 1: { // uncommon trap on slow path
      BuildCutout unless(this, tst, PROB_MAX);
      // Do not deoptimize this nmethod. Go to Interpreter to initialize class.
      uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
      break;
    }
    case 2: { // runtime call on slow path
      if (StressClassInitBarriers) {
        tst = makecon(TypeInt::ZERO); // always go through slow path
      }
      IfNode* iff = create_and_xform_if(control(), tst, PROB_MAX, COUNT_UNKNOWN);

      // Region merging the fast path (req 1) with the slow path (req 2).
      RegionNode* r = new RegionNode(3);
      r->init_req(1, _gvn.transform(new IfTrueNode(iff)));

      // Continue building along the slow (not-yet-initialized) path.
      set_control(_gvn.transform(new IfFalseNode(iff)));

      if (!stopped()) {
        kill_dead_locals();

        Node* call = make_runtime_call(RC_NO_LEAF,
                                       OptoRuntime::class_init_barrier_Type(),
                                       OptoRuntime::class_init_barrier_Java(),
                                       nullptr, TypePtr::BOTTOM,
                                       klass);

        // FIXME: deoptimize for now. deoptimize=false doesn't work with late inlining yet.
        // Parse::create_entry_map() introduces a barrier which uses distinct JVM state (*before* call).
        // Compilation fails when distinct exception states are combined.
        make_slow_call_ex(call, env()->Throwable_klass(), /*separate_io_proj=*/true, /*deoptimize=*/true);

        Node* fast_io  = call->in(TypeFunc::I_O);
        Node* fast_mem = call->in(TypeFunc::Memory);
        // These two phis are pre-filled with copies of the fast IO and Memory
        // state; the slow-path results are merged in via req 2 below.
        Node* io_phi   = PhiNode::make(r, fast_io,  Type::ABIO);
        Node* mem_phi  = PhiNode::make(r, fast_mem, Type::MEMORY, TypePtr::BOTTOM);

        r->init_req(2, control());
        io_phi->init_req(2, i_o());
        mem_phi->init_req(2, reset_memory());

        set_all_memory(_gvn.transform(mem_phi));
        set_i_o(_gvn.transform(io_phi));
      } else {
        // Slow path proved unreachable; keep the region's shape consistent.
        r->init_req(2, top());
      }
      set_control(_gvn.transform(r));
      break;
    }

    default: fatal("unknown barrier mode: %d", ClassInitBarrierMode);
  }
  C->set_has_clinit_barriers(true);
}
3066 
3067 void GraphKit::guard_klass_being_initialized(Node* klass) {
3068   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
3069   Node* adr = basic_plus_adr(top(), klass, init_state_off);
3070   Node* init_state = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3071                                     adr->bottom_type()->is_ptr(), TypeInt::BYTE,
3072                                     T_BYTE, MemNode::unordered);
3073   init_state = _gvn.transform(init_state);
3074 
3075   Node* being_initialized_state = makecon(TypeInt::make(InstanceKlass::being_initialized));
3076 
3077   Node* chk = _gvn.transform(new CmpINode(being_initialized_state, init_state));
3078   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3079 
3080   { BuildCutout unless(this, tst, PROB_MAX);
3081     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
3082   }
3083 }
3084 
3085 void GraphKit::guard_init_thread(Node* klass) {
3086   int init_thread_off = in_bytes(InstanceKlass::init_thread_offset());
3087   Node* adr = basic_plus_adr(top(), klass, init_thread_off);
3088 
3089   Node* init_thread = LoadNode::make(_gvn, nullptr, immutable_memory(), adr,
3090                                      adr->bottom_type()->is_ptr(), TypePtr::NOTNULL,
3091                                      T_ADDRESS, MemNode::unordered);
3092   init_thread = _gvn.transform(init_thread);
3093 
3094   Node* cur_thread = _gvn.transform(new ThreadLocalNode());
3095 
3096   Node* chk = _gvn.transform(new CmpPNode(cur_thread, init_thread));
3097   Node* tst = _gvn.transform(new BoolNode(chk, BoolTest::eq));
3098 
3099   { BuildCutout unless(this, tst, PROB_MAX);
3100     uncommon_trap(Deoptimization::Reason_uninitialized, Deoptimization::Action_none);
3101   }
3102 }
3103 
3104 void GraphKit::clinit_barrier(ciInstanceKlass* ik, ciMethod* context) {
3105   if (C->do_clinit_barriers()) {
3106     Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3107     guard_klass_is_initialized(klass);
3108     return;
3109   }
3110   if (ik->is_being_initialized()) {
3111     if (C->needs_clinit_barrier(ik, context)) {
3112       Node* klass = makecon(TypeKlassPtr::make(ik, Type::trust_interfaces));
3113       guard_klass_being_initialized(klass);
3114       guard_init_thread(klass);
3115       insert_mem_bar(Op_MemBarCPUOrder);
3116     }
3117   } else if (ik->is_initialized()) {
3118     return; // no barrier needed
3119   } else {
3120     uncommon_trap(Deoptimization::Reason_uninitialized,
3121                   Deoptimization::Action_reinterpret,
3122                   nullptr);
3123   }
3124 }
3125 
3126 //------------------------maybe_cast_profiled_receiver-------------------------
3127 // If the profile has seen exactly one type, narrow to exactly that type.
3128 // Subsequent type checks will always fold up.
3129 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
3130                                              const TypeKlassPtr* require_klass,
3131                                              ciKlass* spec_klass,
3132                                              bool safe_for_replace) {
< prev index next >